Example 1
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        TestConnectivityZip.import_test_connectivity96(self.test_user,
                                                       self.test_project,
                                                       subject=TEST_SUBJECT_A)

        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())

        self._import_csv_test_connectivity(reference_connectivity.gid, TEST_SUBJECT_B)

        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        assert dt_count_before + 1 == dt_count_after

        filters = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        # check relationship between the imported connectivity and the reference
        assert (reference_connectivity.centres == imported_connectivity.centres).all()
        assert (reference_connectivity.orientations == imported_connectivity.orientations).all()

        assert reference_connectivity.number_of_regions == imported_connectivity.number_of_regions
        assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()

        assert not (reference_connectivity.weights == imported_connectivity.weights).all()
        assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
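The array assertions above lean on numpy's element-wise comparison: "a == b" returns a boolean array, and ".all()" collapses it to a single truth value. A quick standalone illustration of the pattern:

    import numpy

    a = numpy.array([1.0, 2.0, 3.0])
    b = numpy.array([1.0, 2.0, 3.0])
    c = numpy.array([1.0, 2.0, 4.0])

    assert (a == b).all()        # every element matches
    assert not (a == c).all()    # at least one element differs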
Example 2
 def test_filter_addition(self):
     """
     test addition in filter chain
     """
     filter1 = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                           operations=["=="],
                           values=['test_val'])
     filter2 = FilterChain(fields=[FilterChain.datatype + '.attribute_2'],
                           operations=['in'],
                           values=[['test_val2', 1]])
     test_filter = filter1 + filter2
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=1))
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2='test_val2'))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=2))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val1',
                                        attribute_2=1))
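Filter addition concatenates the three parallel lists (fields, operations, values), which are matched by index. A minimal, hypothetical stand-in (not the TVB FilterChain class) mirroring just that behaviour:

    class MiniFilterChain(object):
        """Hypothetical stand-in: models only the parallel lists and '+'."""
        def __init__(self, fields=None, operations=None, values=None):
            self.fields = list(fields or [])
            self.operations = list(operations or [])
            self.values = list(values or [])

        def __add__(self, other):
            # Concatenate the condition lists, as the test above expects.
            return MiniFilterChain(self.fields + other.fields,
                                   self.operations + other.operations,
                                   self.values + other.values)

    combined = (MiniFilterChain(['attribute_1'], ['=='], ['test_val']) +
                MiniFilterChain(['attribute_2'], ['in'], [['test_val2', 1]]))
    print(combined.fields, combined.operations, combined.values)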
Example 3
 def get_input_tree(self):
     """
     Take as Input a Connectivity Object.
     """
     return [{'name': 'input_data',
              'label': 'Connectivity Matrix',
              'type': Connectivity,
              'required': True},
             {'name': 'surface_data',
              'label': 'Brain Surface',
              'type': CorticalSurface,
              'description': 'The Brain Surface is used to give you an idea of the connectivity position '
                             'relative to the full brain cortical surface. This surface will be displayed '
                             'as a shadow (only used in 3D Edges viewer).'},
             {'name': 'colors',
              'label': 'Node Colors',
              'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'A ConnectivityMeasure DataType that establishes a colormap for the nodes '
                             'displayed in the 2D Connectivity viewers.'},
             {'name': 'step',
              'label': 'Color Threshold',
              'type': 'float',
              'description': 'All nodes with a value greater than this threshold will be displayed as red '
                             'discs, otherwise they will be yellow. (This applies to 2D Connectivity Viewers '
                             'and the threshold will depend on the metric used to set the Node Color)'},
             {'name': 'rays',
              'label': 'Shapes Dimensions',
              'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'A ConnectivityMeasure datatype used to establish the size of the spheres '
                             'representing each node. (It only applies to 3D Nodes viewer).'}]
Example 4
    def test_filter_sql_equivalent(self):
        """
        Test applying a filter on DB.
        """
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value3"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value3"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

        test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['value1'])
        test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['vaue2'])
        test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value1', 'value2']])
        test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value5', 'value6']])
        
        all_stored_dts = self.count_all_entities(Datatype1)
        self.assertEqual(3, all_stored_dts)
        
        self._evaluate_db_filter(test_filter_1, 2)
        self._evaluate_db_filter(test_filter_2, 0)
        self._evaluate_db_filter(test_filter_3, 1)
        self._evaluate_db_filter(test_filter_4, 0)
Example 5
    def get_input_tree(self):
        """
        Take as Input a Connectivity Object.
        """

        filters_ui = [UIFilter(linked_elem_name="colors",
                               linked_elem_field=FilterChain.datatype + "._connectivity"),
                      UIFilter(linked_elem_name="rays",
                               linked_elem_field=FilterChain.datatype + "._connectivity")]

        json_ui_filter = json.dumps([ui_filter.to_dict() for ui_filter in filters_ui])


        return [{'name': 'input_data', 'label': 'Connectivity Matrix', 'type': Connectivity,
                 'required': True, KWARG_FILTERS_UI: json_ui_filter},
                {'name': 'surface_data', 'label': 'Brain Surface', 'type': CorticalSurface,
                 'description': 'The Brain Surface is used to give you an idea of the connectivity position relative '
                                'to the full brain cortical surface. This surface will be displayed as a shadow '
                                '(only used in 3D Edges tab).'},
                {'name': 'colors', 'label': 'Node Colors', 'type': ConnectivityMeasure,
                 'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                           operations=["=="], values=[1]),
                 'description': 'A ConnectivityMeasure DataType that establishes a colormap for the nodes '
                                'displayed in the 2D Connectivity tabs.'},
                {'name': 'step', 'label': 'Color Threshold', 'type': 'float',
                 'description': 'All nodes with a value greater than or equal (>=) to this threshold will be displayed '
                                'as red discs, otherwise (<) they will be yellow. (This applies to 2D Connectivity '
                                'tabs and the threshold will depend on the metric used to set the Node Color)'},
                {'name': 'rays', 'label': 'Shapes Dimensions', 'type': ConnectivityMeasure,
                 'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                           operations=["=="], values=[1]),
                 'description': 'A ConnectivityMeasure datatype used to establish the size of the spheres representing '
                                'each node. (It only applies to 3D Nodes tab).'}]
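The UIFilter objects above only need to expose to_dict() so the list can be JSON-serialized for the client. A self-contained sketch of that round-trip, using a hypothetical stand-in (the real UIFilter may carry more fields):

    import json

    class UIFilterStub(object):
        """Hypothetical stand-in exposing only the two fields used above."""
        def __init__(self, linked_elem_name, linked_elem_field):
            self.linked_elem_name = linked_elem_name
            self.linked_elem_field = linked_elem_field

        def to_dict(self):
            return {'linked_elem_name': self.linked_elem_name,
                    'linked_elem_field': self.linked_elem_field}

    filters_ui = [UIFilterStub('colors', 'datatype._connectivity'),
                  UIFilterStub('rays', 'datatype._connectivity')]
    print(json.dumps([f.to_dict() for f in filters_ui]))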
Example 6
    def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataType that satisfy the conditions imposed.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            common.set_error_message("Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]

        filters = json.loads(filters)
        availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            # Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    temp_date = string2date(filters[FILTER_VALUES][i], False)
                    filters[FILTER_VALUES][i] = temp_date
                except ValueError:
                    raise
        # In order for the filter object not to "stack up" on multiple calls to
        # this method, create a deepCopy to work with
        if ABCAdapter.KEY_CONDITION in current_node:
            new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
        else:
            new_filter = FilterChain()
        new_filter.fields.extend(filters[FILTER_FIELDS])
        new_filter.operations.extend(filters[FILTER_OPERATIONS])
        new_filter.values.extend(filters[FILTER_VALUES])
        # Get dataTypes that match the filters from DB then populate with values
        values, total_count = InputTreeManager().populate_option_values_for_dtype(
            common.get_current_project().id,
            datatype, new_filter,
            self.context.get_current_step())
        # Create a dictionary that matches what the template expects
        parameters = {ABCAdapter.KEY_NAME: name,
                      ABCAdapter.KEY_FILTERABLE: availablefilter,
                      ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                      ABCAdapter.KEY_OPTIONS: values,
                      ABCAdapter.KEY_DATATYPE: datatype}

        if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
            parameters[KEY_WARNING] = WARNING_OVERFLOW

        if ABCAdapter.KEY_REQUIRED in current_node:
            parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
            if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
                parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
        previous_selected = self.context.get_current_default(name)
        if previous_selected in [str(vv['value']) for vv in values]:
            parameters[ABCAdapter.KEY_DEFAULT] = previous_selected

        template_specification = {"inputRow": parameters, "disabled": False,
                                  "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
        return self.fill_default_attributes(template_specification)
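The date-conversion loop above rewrites filter values in place before they reach the DB layer. A rough, self-contained equivalent of just that step (string2date is approximated with datetime.strptime; the constant names and the date format are assumptions):

    import datetime

    FILTER_FIELDS, FILTER_VALUES, FILTER_TYPE = 'fields', 'values', 'type'  # assumed keys

    def convert_date_values(filters, available_filters):
        # Rewrite each value whose declared filter type is 'date' into a datetime.
        for i, field in enumerate(filters[FILTER_FIELDS]):
            if field in available_filters and available_filters[field][FILTER_TYPE] == 'date':
                filters[FILTER_VALUES][i] = datetime.datetime.strptime(
                    filters[FILTER_VALUES][i], '%Y-%m-%d')  # format is an assumption
        return filters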
Example 7
    def test_get_filtered_by_column(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        after a column from a class specific table (e.g. DATA_arraywrapper).
        """
        operation_1 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        operation_2 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)

        one_dim_array = numpy.arange(5)
        two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
        self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
        self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
        self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 3, "Problems with inserting data")
        first_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[1])
        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            first_filter)[1]
        self.assertEqual(count, 2, "Data was not filtered")

        second_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[2])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            second_filter)[0]
        self.assertEqual(len(filtered_data), 1, "Data was not filtered")
        self.assertEqual(filtered_data[0][3], "John Doe 3")

        third_filter = FilterChain(
            fields=[FilterChain.datatype + '._length_1d'],
            operations=["=="],
            values=[3])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            third_filter)[0]
        self.assertEqual(len(filtered_data), 1,
                         "Data was not filtered correctly")
        self.assertEqual(filtered_data[0][3], "John Doe 3")
        try:
            if os.path.exists('One_dim.txt'):
                os.remove('One_dim.txt')
            if os.path.exists('Two_dim.txt'):
                os.remove('Two_dim.txt')
            if os.path.exists('One_dim-1.txt'):
                os.remove('One_dim-1.txt')
        except Exception:
            pass
Example 8
 def test_filter_add_condition(self):
     """
     Test that adding a condition to a filter is working.
     """
     test_filter = FilterChain(fields = [FilterChain.datatype + '.attribute_1'], 
                               operations = ["=="], values = ['test_val'])
     filter_input = FilteringTest.DummyFilterClass(attribute_1 = 'test_val', attribute_2 = 1)
     self.__should_pass(test_filter, filter_input)
     test_filter.add_condition(FilterChain.datatype + '.attribute_2', '==', 2)
     self.__should_fail(test_filter, filter_input)
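add_condition simply appends one (field, operation, value) triple to the chain's parallel lists, which is why a previously passing input can start failing. Extending the hypothetical MiniFilterChain stand-in from Example 2:

    def add_condition(chain, field, operation, value):
        # Append one triple to the parallel lists of a MiniFilterChain-like object.
        chain.fields.append(field)
        chain.operations.append(operation)
        chain.values.append(value)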
Example 9
    def fill_input_tree_with_options(self, attributes_list, project_id, category_key):
        """
        For a datatype node in the input tree, load all instances from the db that fit the filters.
        """
        result = []
        for param in attributes_list:
            if getattr(param, KEY_UI_HIDE, False):
                continue
            transformed_param = copy(param)

            if isinstance(param, (itr.DatatypeNode, itr.ComplexDtypeNode)):
                filter_condition = param.conditions
                if filter_condition is None:
                    filter_condition = FilterChain('')
                filter_condition.add_condition(FilterChain.datatype + ".visible", "==", True)

                complex_dt_attributes = None
                if isinstance(param, itr.ComplexDtypeNode):
                    complex_dt_attributes = self.fill_input_tree_with_options(param.attributes,
                                                                              project_id, category_key)
                values, total_count = self.populate_option_values_for_dtype(project_id, param.type, filter_condition,
                                                                            category_key, complex_dt_attributes)
                if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
                    transformed_param.warning = WARNING_OVERFLOW

                if param.required and len(values) > 0 and param.default is None:
                    transformed_param.default = str(values[-1][KEY_VALUE])

                transformed_param.filterable = FilterChain.get_filters_for_type(param.type)
                transformed_param.type = TYPE_SELECT # todo this type transfer is not nice
                transformed_param.datatype = param.type
                # If Portlet dynamic parameter, don't add the options instead
                # just add the default value.
                if getattr(param, KEY_DYNAMIC, False):
                    dynamic_param = {KEY_NAME: param.default,
                                     KEY_VALUE: param.default}
                    transformed_param.options = [dynamic_param]
                else:
                    transformed_param.options = values

                ### DataType-attributes are no longer necessary, they were already copied on each OPTION
                transformed_param.attributes = [] # todo check if this is ok
            elif isinstance(param, itr.SelectTypeNode):
                transformed_param.options = self.fill_input_tree_with_options(param.options,
                                                                              project_id, category_key)
                if len(param.options) > 0 and param.default is None:
                    transformed_param.default = str(param.options[-1].value)
            elif isinstance(param, (itr.TypeNode, itr.DictNode)):  #ComplexDatatypeNode enters here!
                transformed_param.attributes = self.fill_input_tree_with_options(param.attributes,
                                                                                  project_id, category_key)

            result.append(transformed_param)
        return result
Example 10
 def test_invalid_filter(self):
     """
     Error test-case when evaluating filter in Python.
     """
     test_filter = FilterChain(
         fields=[FilterChain.datatype + '.attribute_1'],
         operations=["in"],
         values=[None])
     with pytest.raises(InvalidFilterEntity):
         test_filter.get_python_filter_equivalent(
             TestFiltering.DummyFilterClass(
                 attribute_1=['test_val', 'test2']))
Example 11
 def test_invalid_input(self):
     """
     Error test-case.
     """
     test_filter = FilterChain(
         fields=[FilterChain.datatype + '.other_attribute_1'],
         operations=["in"],
         values=['test'])
     with pytest.raises(InvalidFilterChainInput):
         test_filter.get_python_filter_equivalent(
             TestFiltering.DummyFilterClass(
                 attribute_1=['test_val', 'test2']))
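Examples 10 and 11 exercise two distinct failure modes: a filter value that cannot be used with the declared operation (InvalidFilterEntity) versus an input object that lacks the referenced attribute (InvalidFilterChainInput). A minimal, hypothetical evaluator reproducing both behaviours:

    class InvalidFilterEntity(Exception):
        pass

    class InvalidFilterChainInput(Exception):
        pass

    def evaluate_python_filter(fields, operations, values, instance):
        """Hypothetical mirror of get_python_filter_equivalent's error handling."""
        for field, operation, value in zip(fields, operations, values):
            attr = field.split('.', 1)[1]  # strip the 'datatype.' prefix
            if not hasattr(instance, attr):
                raise InvalidFilterChainInput("Missing attribute: " + attr)
            actual = getattr(instance, attr)
            if operation == '==':
                if actual != value:
                    return False
            elif operation == 'in':
                if not isinstance(value, (list, tuple, set, str)):
                    raise InvalidFilterEntity("Cannot apply 'in' against %r" % (value,))
                if actual not in value:
                    return False
            else:
                raise InvalidFilterEntity("Unsupported operation: " + operation)
        return True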
Example 12
 def _build_custom_filter(filter_data):
     """
     Param filter_data should be at this point a dictionary of the form:
     {'type' : 'fitler_type', 'value' : 'fitler_value'}
     If 'filter_type' is not handled just return None.
     """
     filter_data = json.loads(filter_data)
     if filter_data['type'] == 'from_burst':
         return FilterChain('Burst', [FilterChain.datatype + '.fk_parent_burst'],
                            [filter_data['value']], operations=["=="])
     if filter_data['type'] == 'from_datatype':
         return FilterChain('Datatypes', [FilterChain.operation + '.parameters'],
                            [filter_data['value']], operations=["like"])
     return None
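A hedged usage sketch for the factory above. The payload shape comes straight from the docstring; the concrete value is illustrative, and StaticFiltersFactory itself comes from TVB (its import path varies by version):

    import json

    payload = json.dumps({'type': 'from_burst', 'value': 17})
    burst_filter = StaticFiltersFactory._build_custom_filter(payload)
    # burst_filter is a FilterChain named 'Burst' matching
    # datatype.fk_parent_burst == 17; unhandled types return None.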
Example 13
 def get_input_tree(self):
     return [{'name': 'data_0', 'label': 'Connectivity Measures 1',
              'type': ConnectivityMeasure, 'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Punctual values for each node in the connectivity matrix. '
                             'This will give the colors of the resulting topographic image.'},
             {'name': 'data_1', 'label': 'Connectivity Measures 2', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Comparative values'},
             {'name': 'data_2', 'label': 'Connectivity Measures 3', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Comparative values'}]
Example 14
 def test_complex_filter(self):
     """
     Test a filter with at least 2 conditions
     """
     test_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1',
                                       FilterChain.datatype + '.attribute_2'],
                               operations=["==", 'in'],
                               values=['test_val', ['test_val2', 1]])
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=1))
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2='test_val2'))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=2))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val1',
                                        attribute_2=1))
Example 15
 def get_input_tree(self):
     return [{'name': 'measure',
              'label': 'Measure',
              'type': MappedArray,
              'required': True,
              'description': 'A measure to view on anatomy',
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=[">="], values=[2])},
             {'name': 'region_mapping_volume',
              'label': 'Region mapping',
              'type': RegionVolumeMapping,
              'required': False},
             {'name': 'data_slice',
              'label': 'slice indices in numpy syntax',
              'type': 'str',
              'required': False},
             _MappedArrayVolumeBase.get_background_input_tree()]
Example 16
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test"
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example 17
    def getfiltereddatatypes(self, name, parent_div, tree_session_key,
                             filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataType that satisfy the conditions imposed.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            base.set_error_message(
                "Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]

        filters = json.loads(filters)
        availablefilter = json.loads(
            FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            # Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    # Convert the value (not the field name) before it reaches the DB layer
                    temp_date = string2date(filters[FILTER_VALUES][i], False)
                    filters[FILTER_VALUES][i] = temp_date
                except ValueError:
                    raise
Example 18
    def _get_launchable_algorithms(self, datatype_gid, categories):

        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) +
                          " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(
            all_compatible_classes, categories_ids)

        filtered_adapters = []
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
            if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                filtered_adapters.append(stored_adapter)

        return datatype_instance, filtered_adapters
Example 19
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        test_subject = "test"
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: test_subject
        }

        # Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".subject"],
                                  operations=["=="],
                                  values=[test_subject])
        region_mapping = self._get_entity(RegionMapping, data_filter)

        return region_mapping
Example 20
 def get_input_tree(self):
     return [{'name': 'region_mapping_volume',
              'label': 'Region mapping',
              'type': RegionVolumeMapping,
              'required': True},
             {'name': 'connectivity_measure',
              'label': 'Connectivity measure',
              'type': ConnectivityMeasure,
              'required': False,
              'description': 'A connectivity measure',
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1])}]
Example 21
 def get_input_tree(self):
     return [{'name': 'time_series',
              'label': 'Time Series (Region or Surface)',
              'type': TimeSeries,
              'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '.type',
                                                FilterChain.datatype + '._has_surface_mapping'],
                                        operations=["in", "=="],
                                        values=[['TimeSeriesRegion', 'TimeSeriesSurface'], True])},
             {'name': 'shell_surface',
              'label': 'Shell Surface',
              'type': Surface,
              'required': False,
              'description': "Surface to be displayed semi-transparently, for visual purposes only."}]
Example 22
    def get_input_tree(self):
        """
        Compute interface based on introspected algorithms found.
        """
        algorithm = BaseTimeseriesMetricAlgorithm()
        algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
        tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
        tree[0]['conditions'] = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[4])

        algo_names = self.available_algorithms.keys()
        options = []
        for name in algo_names:
            options.append({
                ABCAdapter.KEY_NAME: name,
                ABCAdapter.KEY_VALUE: name
            })
        tree.append({'name': 'algorithms',
                     'label': 'Selected metrics to be applied',
                     'type': ABCAdapter.TYPE_MULTIPLE,
                     'required': False,
                     'options': options,
                     'description': 'The selected metric algorithms will be applied on the input TimeSeries'})

        return tree
Example 23
    def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataType that satisfy the conditions imposed.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            common.set_error_message("Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]

        filters = json.loads(filters)
        availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            # Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    temp_date = string2date(filters[FILTER_VALUES][i], False)
                    filters[FILTER_VALUES][i] = temp_date
                except ValueError:
                    raise
        # In order for the filter object not to "stack up" on multiple calls to
        # this method, create a deepCopy to work with
        if ABCAdapter.KEY_CONDITION in current_node:
            new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
        else:
            new_filter = FilterChain()
        new_filter.fields.extend(filters[FILTER_FIELDS])
        new_filter.operations.extend(filters[FILTER_OPERATIONS])
        new_filter.values.extend(filters[FILTER_VALUES])
        # Get dataTypes that match the filters from DB then populate with values
        values, total_count = InputTreeManager().populate_option_values_for_dtype(
            common.get_current_project().id,
            datatype, new_filter,
            self.context.get_current_step())
        # Create a dictionary that matches what the template expects
        parameters = {ABCAdapter.KEY_NAME: name,
                      ABCAdapter.KEY_FILTERABLE: availablefilter,
                      ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                      ABCAdapter.KEY_OPTIONS: values,
                      ABCAdapter.KEY_DATATYPE: datatype}

        if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
            parameters[KEY_WARNING] = WARNING_OVERFLOW

        if ABCAdapter.KEY_REQUIRED in current_node:
            parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
            if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
                parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
        previous_selected = self.context.get_current_default(name)
        if previous_selected in [str(vv['value']) for vv in values]:
            parameters[ABCAdapter.KEY_DEFAULT] = previous_selected

        template_specification = {"inputRow": parameters, "disabled": False,
                                  "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
        return self.fill_default_attributes(template_specification)
Example 24
    def test_bad_reference(self):
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!='])
        bad_reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        with pytest.raises(OperationException):
            self._import_csv_test_connectivity(bad_reference_connectivity.gid, TEST_SUBJECT_A)
Example 25
def search_and_export_ts(project_id, export_folder=os.path.join("~", "TVB")):

    #### This is the simplest filter you could write: filter an entity by Subject
    filter_connectivity = FilterChain(
        fields=[FilterChain.datatype + '.subject'],
        operations=["=="],
        values=[DataTypeMetaData.DEFAULT_SUBJECT])

    connectivities = _retrieve_entities_by_filters(Connectivity, project_id,
                                                   filter_connectivity)

    #### A more complex filter: by linked entity (connectivity), BOLD monitor, sampling, operation param:
    filter_timeseries = FilterChain(
        fields=[
            FilterChain.datatype + '._connectivity',
            FilterChain.datatype + '._title',
            FilterChain.datatype + '._sample_period',
            FilterChain.datatype + '._sample_rate',
            FilterChain.operation + '.parameters'
        ],
        operations=["==", "like", ">=", "<=", "like"],
        values=[
            connectivities[0].gid, "Bold", "500", "0.002",
            '"conduction_speed": "3.0"'
        ])

    #### If you want to filter another type of TS, change the class below;
    #### instead of TimeSeriesRegion use TimeSeriesEEG, or TimeSeriesSurface, etc.
    timeseries = _retrieve_entities_by_filters(TimeSeriesRegion, project_id,
                                               filter_timeseries)

    for ts in timeseries:
        print("=============================")
        print(ts.summary_info)
        print(" Original file: " + str(ts.get_storage_file_path()))
        destination_file = os.path.expanduser(
            os.path.join(export_folder, ts.get_storage_file_name()))
        FilesHelper.copy_file(ts.get_storage_file_path(), destination_file)
        if os.path.exists(destination_file):
            print(" TS file copied at: " + destination_file)
        else:
            print(" Some error happened when trying to copy to the destination folder!")
Example 26
    def test_filter_sql_equivalent(self):
        """
        Test applying a filter on DB.
        """
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value3"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value3"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

        test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='],
                                    values=['value1'])
        test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='],
                                    values=['vaue2'])
        test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                            FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'],
                                    values=["value1", ['value1', 'value2']])
        test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                            FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'],
                                    values=["value1", ['value5', 'value6']])

        all_stored_dts = self.get_all_entities(Datatype1)
        self.assertTrue(len(all_stored_dts) == 3,
                        "Expected 3 DTs to be stored for test_filter_sql_equivalent. "
                        "Got %s instead." % len(all_stored_dts))

        self._evaluate_db_filter(test_filter_1, 2)
        self._evaluate_db_filter(test_filter_2, 0)
        self._evaluate_db_filter(test_filter_3, 1)
        self._evaluate_db_filter(test_filter_4, 0)
Example 27
 def build_datatype_filters(selected=RELEVANT_VIEW, single_filter=None):
     """
     Return all visibility filters for data structure page, or only one filter.
     """
     filters = {StaticFiltersFactory.FULL_VIEW: FilterChain(StaticFiltersFactory.FULL_VIEW),
                StaticFiltersFactory.RELEVANT_VIEW: FilterChain(StaticFiltersFactory.RELEVANT_VIEW,
                                                                [FilterChain.datatype + '.visible'],
                                                                [True], operations=["=="])}
     if selected is None or len(selected) == 0:
         selected = StaticFiltersFactory.RELEVANT_VIEW
     if selected in filters:
         filters[selected].selected = True
     if single_filter is not None:
         if single_filter in filters:
             return filters[single_filter]
         else:
             ### We have some custom filter to build
             return StaticFiltersFactory._build_custom_filter(single_filter)
     return filters.values()
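A short usage sketch, read directly off the code above: with no arguments the factory returns every visibility filter (as a list, with the relevant view pre-selected); with single_filter it returns just one FilterChain, falling back to the custom-filter builder for unknown names. Illustrative, not canonical:

    # All visibility filters, with the relevant view pre-selected:
    all_filters = StaticFiltersFactory.build_datatype_filters()

    # A single named filter:
    relevant = StaticFiltersFactory.build_datatype_filters(
        single_filter=StaticFiltersFactory.RELEVANT_VIEW)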
Example 28
 def get_input_tree(self):
     return [{'name': 'time_series', 'label': 'Time Series (Region or Surface)',
              'type': TimeSeries, 'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '.type',
                                                FilterChain.datatype + '._nr_dimensions'],
                                        operations=["in", "=="],
                                        values=[['TimeSeriesRegion', 'TimeSeriesSurface'], 4]),
              'description': 'Depending on the simulation length and your browser capabilities, you might'
                             ' experience browser crashes after multiple runs. In such cases, it is recommended'
                             ' to empty the browser cache and try again. Sorry for the inconvenience.'}]
Example 29
 def get_input_tree(self):
     """
     Take as Input a Connectivity Object.
     """
     return [{'name': 'datatype_group',
              'label': 'Datatype Group',
              'type': DataTypeGroup,
              'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + ".no_of_ranges"],
                                        operations=["=="], values=[2])}]
Example 30
 def get_available_datatypes(self, project_id, data_name, filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     """
     data_class = FilterChain._get_class_instance(data_name)
     if data_class is None:
         self.logger.warning("Invalid Class specification:" + str(data_name))
         return []
     else:
         self.logger.debug('Filtering:' + str(data_class))
         return dao.get_values_of_datatype(project_id, data_class, filters)
Example 31
 def get_input_tree(self):
     return [
         dict(name="connectivity",
              label=self._ui_connectivity_label,
              type=Connectivity,
              required=True,
              conditions=FilterChain(
                  fields=[FilterChain.datatype + '._undirected'],
                  operations=["=="],
                  values=['1']))
     ]
Example 32
 def get_input_tree(self):
     """
     Return a list of lists describing the interface to the analyzer. This
     is used by the GUI to generate the menus and fields necessary for
     defining a simulation.
     """
     algorithm = NodeCovariance()
     algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
     tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
     tree[0]['conditions'] = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                         operations=["=="], values=[4])
     return tree
Example 33
 def get_available_datatypes(self, project_id, data_name, filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     """
     data_class = FilterChain._get_class_instance(data_name)
     if data_class is None:
         self.logger.warning("Invalid Class specification:" + str(data_name))
         return [], 0
     else:
         self.logger.debug('Filtering:' + str(data_class))
         return dao.get_values_of_datatype(project_id, data_class, filters, self.MAXIMUM_DATA_TYPES_DISPLAYED)
Example 34
    def _get_launchable_algorithms(self, datatype_gid, categories):

        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) + " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(all_compatible_classes, categories_ids)

        filtered_adapters = []
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
            if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                filtered_adapters.append(stored_adapter)

        return datatype_instance, filtered_adapters
Example 35
    def build_operations_filters(simulation_algorithm, logged_user_id):
        """
        :returns: list of filters that can be applied on Project View Operations page.
        """
        new_filters = []

        ### Filter by algorithm / categories 
        new_filter = FilterChain("Omit Views", [FilterChain.algorithm_category + '.display'],
                                 [False], operations=["=="])
        new_filters.append(new_filter)

        new_filter = FilterChain("Only Upload", [FilterChain.algorithm_category + '.rawinput'],
                                 [True], operations=["=="])
        new_filters.append(new_filter)
        if simulation_algorithm is not None:
            new_filter = FilterChain("Only Simulations", [FilterChain.algorithm + '.id'],
                                     [simulation_algorithm.id], operations=["=="])
            new_filters.append(new_filter)

        ### Filter by operation status
        filtered_statuses = {model.STATUS_STARTED: "Only Running",
                             model.STATUS_ERROR: "Only with Errors",
                             model.STATUS_CANCELED: "Only Canceled",
                             model.STATUS_FINISHED: "Only Finished",
                             model.STATUS_PENDING: "Only Pending"}
        for status, title in six.iteritems(filtered_statuses):
            new_filter = FilterChain(title, [FilterChain.operation + '.status'], [status], operations=["=="])
            new_filters.append(new_filter)

        ### Filter by author
        new_filter = FilterChain("Only mine", [FilterChain.operation + '.fk_launched_by'],
                                 [logged_user_id], operations=["=="])
        new_filters.append(new_filter)

        ### Filter by other flags
        new_filter = FilterChain("Only relevant", [FilterChain.operation + '.visible'], [True], operations=["=="])
        new_filter.selected = True
        new_filters.append(new_filter)

        return new_filters
Example 36
    def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataType that satisfy the conditions imposed.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            base.set_error_message("Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]

        filters = json.loads(filters)
        availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            # Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    # Convert the value (not the field name) before it reaches the DB layer
                    temp_date = string2date(filters[FILTER_VALUES][i], False)
                    filters[FILTER_VALUES][i] = temp_date
                except ValueError:
                    raise
Example 37
    def retrieve_launchers(self, dataname, datatype_gid=None, inspect_group=False, exclude_categories=None):
        """
        Returns all the available launch-able algorithms from the database.
        Filter the ones accepting as required input a specific DataType.

        :param dataname: String or class representing DataType to retrieve filters for it.
        :param datatype_gid: Optional GID, to filter algorithms for this particular entity.
        :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
        :param exclude_categories: List of categories to be excluded from the result.
        """
        if exclude_categories is None:
            exclude_categories = []
        launch_categ = dao.get_launchable_categories()
        launch_categ = dict((categ.id, categ.displayname) for categ in launch_categ
                            if categ.id not in exclude_categories)
        launch_groups = dao.get_apliable_algo_groups(dataname, launch_categ.keys())

        if datatype_gid is None:
            return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

        try:
            datatype_instance = dao.get_datatype_by_gid(datatype_gid)
            data_class = datatype_instance.__class__
            for one_class in data_class.__bases__:
                launch_groups.extend(dao.get_apliable_algo_groups(one_class.__name__, launch_categ.keys()))
            specific_datatype = dao.get_generic_entity(data_class, datatype_gid, "gid")
            to_remove = []
            for one_group in launch_groups:
                valid_algorithms = []
                for one_algo in one_group.children:
                    filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                    if not filter_chain or filter_chain.get_python_filter_equivalent(specific_datatype[0]):
                        valid_algorithms.append(one_algo)
                if len(valid_algorithms) > 0:
                    one_group.children = copy.deepcopy(valid_algorithms)
                else:
                    to_remove.append(one_group)
            for one_group in to_remove:
                launch_groups.remove(one_group)
                del one_group
            launchers = ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)

            if dataname == model.DataTypeGroup.__name__:
                # If part of a group, update also with specific launchers of that datatype
                dt_group = dao.get_datatype_group_by_gid(datatype_gid)
                datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
                if len(datatypes):
                    datatype = datatypes[-1]
                    datatype = dao.get_datatype_by_gid(datatype.gid)
                    views_categ_id = dao.get_visualisers_categories()[0].id
                    specific_launchers = self.retrieve_launchers(datatype.__class__.__name__, datatype.gid,
                                                                 True, [views_categ_id] + exclude_categories)
                    for key in specific_launchers:
                        if key in launchers:
                            launchers[key].update(specific_launchers[key])
                        else:
                            launchers[key] = specific_launchers[key]
            return launchers
        except Exception as excep:
            ProjectService().logger.exception(excep)
            ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
            return ProjectService.__prepare_group_result(launch_groups, launch_categ, inspect_group)
Example 38
    def fill_input_tree_with_options(self, attributes_list, project_id, category_key):
        """
        For a datatype node in the input tree, load all instances from the db that fit the filters.
        """
        result = []
        for param in attributes_list:
            if param.get(KEY_UI_HIDE):
                continue
            transformed_param = copy(param)

            if KEY_TYPE in param and param[KEY_TYPE] not in STATIC_ACCEPTED_TYPES:

                if KEY_CONDITION in param:
                    filter_condition = param[KEY_CONDITION]
                else:
                    filter_condition = FilterChain('')
                filter_condition.add_condition(FilterChain.datatype + ".visible", "==", True)

                values, total_count = self.populate_option_values_for_dtype(project_id, param[KEY_TYPE],
                                                                            filter_condition, category_key)
                if param.get(KEY_ATTRIBUTES):  # copy complex datatype attributes to all options
                    complex_dt_attributes = self.fill_input_tree_with_options(param[KEY_ATTRIBUTES],
                                                                              project_id, category_key)
                    for value in values:
                        if value[KEY_NAME] != 'All':
                            value[KEY_ATTRIBUTES] = complex_dt_attributes

                if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
                    transformed_param[KEY_WARNING] = WARNING_OVERFLOW

                if param.get(KEY_REQUIRED) and len(values) > 0 and param.get(KEY_DEFAULT) is None:
                    transformed_param[KEY_DEFAULT] = str(values[-1][KEY_VALUE])

                transformed_param[KEY_FILTERABLE] = FilterChain.get_filters_for_type(param[KEY_TYPE])
                transformed_param[KEY_TYPE] = TYPE_SELECT
                # If Portlet dynamic parameter, don't add the options instead
                # just add the default value.
                if KEY_DYNAMIC in param:
                    dynamic_param = {KEY_NAME: param[KEY_DEFAULT],
                                     KEY_VALUE: param[KEY_DEFAULT]}
                    transformed_param[KEY_OPTIONS] = [dynamic_param]
                else:
                    transformed_param[KEY_OPTIONS] = values
                if type(param[KEY_TYPE]) == str:
                    transformed_param[KEY_DATATYPE] = param[KEY_TYPE]
                else:
                    data_type = param[KEY_TYPE]
                    transformed_param[KEY_DATATYPE] = data_type.__module__ + '.' + data_type.__name__

                ### DataType-attributes are no longer necessary, they were already copied on each OPTION
                transformed_param[KEY_ATTRIBUTES] = []

            else:
                if param.get(KEY_OPTIONS) is not None:
                    transformed_param[KEY_OPTIONS] = self.fill_input_tree_with_options(param[KEY_OPTIONS],
                                                                                        project_id, category_key)
                    if param.get(KEY_REQUIRED) and len(param[KEY_OPTIONS]) > 0 and param.get(KEY_DEFAULT) is None:
                        transformed_param[KEY_DEFAULT] = str(param[KEY_OPTIONS][-1][KEY_VALUE])

                if param.get(KEY_ATTRIBUTES) is not None:
                    transformed_param[KEY_ATTRIBUTES] = self.fill_input_tree_with_options(param[KEY_ATTRIBUTES],
                                                                                          project_id, category_key)
            result.append(transformed_param)
        return result
Example 39
    def retrieve_launchers(self, datatype_gid, inspect_group=False, include_categories=None):
        """
        Returns all the available launch-able algorithms from the database.
        Filter the ones accepting as required input a specific DataType.

        :param datatype_gid: GID, to filter algorithms for this particular entity.
        :param inspect_group: TRUE if we are now in the inspection of sub-entities in a DataTypeGroup
        :param include_categories: List of categories to be included in the result.
                When None, all launchable categories are included
        """
        try:
            all_launch_categ = dao.get_launchable_categories()
            launch_categ = dict((categ.id, categ.displayname) for categ in all_launch_categ
                                if include_categories is None or categ.id in include_categories)

            datatype_instance = dao.get_datatype_by_gid(datatype_gid)
            data_class = datatype_instance.__class__
            all_compatible_classes = [data_class.__name__]
            for one_class in getmro(data_class):
                if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                    all_compatible_classes.append(one_class.__name__)

            self.logger.debug("Searching in categories: " + str(len(launch_categ)) + " - " +
                              str(launch_categ.keys()) + "-" + str(include_categories))
            launchable_groups = dao.get_apliable_algo_groups(all_compatible_classes, launch_categ.keys())

            to_remove = []
            for one_group in launchable_groups:
                compatible_algorithms = []
                for one_algo in one_group.children:
                    filter_chain = FilterChain.from_json(one_algo.datatype_filter)
                    if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                        compatible_algorithms.append(one_algo)
                if len(compatible_algorithms) > 0:
                    one_group.children = copy.deepcopy(compatible_algorithms)
                else:
                    to_remove.append(one_group)

            for one_group in to_remove:
                launchable_groups.remove(one_group)
                del one_group

            launchers = ProjectService.__prepare_group_result(launchable_groups, launch_categ, inspect_group)

            if data_class.__name__ == model.DataTypeGroup.__name__:
                # If part of a group, update also with specific launchers of the child datatype
                dt_group = dao.get_datatype_group_by_gid(datatype_gid)
                datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
                if len(datatypes):
                    datatype = datatypes[-1]
                    datatype = dao.get_datatype_by_gid(datatype.gid)

                    views_categ_id = dao.get_visualisers_categories()[0].id
                    categories_for_small_type = [categ.id for categ in all_launch_categ
                                                 if categ.id != views_categ_id and (include_categories is None or
                                                                                    categ.id in include_categories)]
                    if categories_for_small_type:
                        specific_launchers = self.retrieve_launchers(datatype.gid, True, categories_for_small_type)
                        for key in specific_launchers:
                            if key in launchers:
                                launchers[key].update(specific_launchers[key])
                            else:
                                launchers[key] = specific_launchers[key]
            return launchers

        except Exception as excep:
            ProjectService().logger.exception(excep)
            ProjectService().logger.warning("Attempting to filter launcher for group despite exception!")
            return ProjectService.__prepare_group_result([], [], inspect_group)
Example 40
    def prepare_parameters(self, attributes_list, project_id, category_key):
        """
        Private method, to be called recursively.
        It will receive a list of Attributes, and it will populate 'options'
        entry with data references from DB.
        """
        result = []
        for param in attributes_list:
            if param.get(ABCAdapter.KEY_UI_HIDE):
                continue
            transformed_param = copy(param)

            if (ABCAdapter.KEY_TYPE in param) and not (param[ABCAdapter.KEY_TYPE] in ABCAdapter.STATIC_ACCEPTED_TYPES):

                if ABCAdapter.KEY_CONDITION in param:
                    filter_condition = param[ABCAdapter.KEY_CONDITION]
                else:
                    filter_condition = FilterChain('')
                filter_condition.add_condition(FilterChain.datatype + ".visible", "==", True)

                data_list, total_count = self.get_available_datatypes(project_id, param[ABCAdapter.KEY_TYPE],
                                                                      filter_condition)

                if total_count > self.MAXIMUM_DATA_TYPES_DISPLAYED:
                    transformed_param[self.KEY_WARNING] = self.WARNING_OVERFLOW

                complex_dt_attributes = None
                if param.get(ABCAdapter.KEY_ATTRIBUTES):
                    complex_dt_attributes = self.prepare_parameters(param[ABCAdapter.KEY_ATTRIBUTES], 
                                                                    project_id, category_key)
                values = self.populate_values(data_list, param[ABCAdapter.KEY_TYPE], 
                                              category_key, complex_dt_attributes)
                
                if (transformed_param.get(ABCAdapter.KEY_REQUIRED) and len(values) > 0 and
                        transformed_param.get(ABCAdapter.KEY_DEFAULT) in [None, 'None']):
                    transformed_param[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
                transformed_param[ABCAdapter.KEY_FILTERABLE] = FilterChain.get_filters_for_type(
                    param[ABCAdapter.KEY_TYPE])
                transformed_param[ABCAdapter.KEY_TYPE] = ABCAdapter.TYPE_SELECT
                # If Portlet dynamic parameter, don't add the options instead
                # just add the default value. 
                if KEY_DYNAMIC in param:
                    dynamic_param = {ABCAdapter.KEY_NAME: param[ABCAdapter.KEY_DEFAULT],
                                     ABCAdapter.KEY_VALUE: param[ABCAdapter.KEY_DEFAULT]}
                    transformed_param[ABCAdapter.KEY_OPTIONS] = [dynamic_param]
                else:
                    transformed_param[ABCAdapter.KEY_OPTIONS] = values
                if type(param[ABCAdapter.KEY_TYPE]) == str:
                    transformed_param[ABCAdapter.KEY_DATATYPE] = param[ABCAdapter.KEY_TYPE]
                else:
                    data_type = param[ABCAdapter.KEY_TYPE]
                    transformed_param[ABCAdapter.KEY_DATATYPE] = data_type.__module__ + '.' + data_type.__name__
            
                ### DataType-attributes are no longer necessary, they were already copied on each OPTION
                transformed_param[ABCAdapter.KEY_ATTRIBUTES] = []
                
            else:
                if param.get(ABCAdapter.KEY_OPTIONS) is not None:
                    transformed_param[ABCAdapter.KEY_OPTIONS] = self.prepare_parameters(param[ABCAdapter.KEY_OPTIONS],
                                                                                        project_id, category_key)
                    if (transformed_param.get(ABCAdapter.KEY_REQUIRED) and
                            len(param[ABCAdapter.KEY_OPTIONS]) > 0 and
                            (transformed_param.get(ABCAdapter.KEY_DEFAULT) in [None, 'None'])):
                        def_val = str(param[ABCAdapter.KEY_OPTIONS][-1][ABCAdapter.KEY_VALUE])
                        transformed_param[ABCAdapter.KEY_DEFAULT] = def_val
                    
                if param.get(ABCAdapter.KEY_ATTRIBUTES) is not None:
                    transformed_param[ABCAdapter.KEY_ATTRIBUTES] = self.prepare_parameters(
                        param[ABCAdapter.KEY_ATTRIBUTES], project_id, category_key)
            result.append(transformed_param)   
        return result