def test_datatypes_groups(self):
        """
        Tests whether the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operations yet")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEqual(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Relaunch the stopped operations synchronously, to make sure they are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but got: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()

    # Prepare the launch arguments by collecting the default value of every
    # simulator parameter, as the framework would do it for the burst page.
    _, algo_group = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = flow_service.build_adapter_instance(algo_group)
    flatten_interface = simulator_adapter.flaten_input_interface()
    prepared_flatten_interface = flow_service.prepare_parameters(flatten_interface, project.id,
                                                                 algo_group.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            # Dictionary defaults (e.g. equation parameters) are submitted as strings.
            value = str(value)
        if hasattr(value, 'tolist'):
            # Numpy arrays are converted to plain lists before submission.
            value = value.tolist()
        launch_args[entry['name']] = value
    # The caller's keyword arguments override the prepared defaults.
    launch_args.update(**kwargs)

    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
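
# Usage sketch (illustrative only): fires a simulation in project 1, overriding
# the 'simulation_length' default; any flattened simulator parameter can be
# passed as a keyword override. Assumes a populated TVB database.
if __name__ == '__main__':
    operation = fire_simulation(project_id=1, simulation_length=100)
    print "Fired simulation operation with id %s" % operation.id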
class ModelValidator(object):
    overwrites = {}


    def __init__(self, overwrites=None, settings_file=None):
        """ Parameters can be overwritten either from a settings file or from a dictionary. """
        # Copy the class-level defaults, so instances do not share mutable state.
        self.overwrites = dict(self.overwrites)
        if overwrites is not None:
            self.overwrites.update(overwrites)
        if settings_file is not None:
            settings = open(settings_file).read()
            for line in settings.split('\n'):
                if not line.strip():
                    continue
                key, value = line.split('=')
                self.overwrites[key.strip()] = value.strip()
        if KEY_PROJECT not in self.overwrites:
            raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
        self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
        self.flow_service = FlowService()
        self.operation_service = OperationService()


    def launch_validation(self):
        """
        Prepare the arguments to be submitted and launch the actual operations group.
        TODO: retrieve the results and check them for errors.
        """
        _, algo_group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        simulator_adapter = self.flow_service.build_adapter_instance(algo_group)
        launch_args = {}
        flatten_interface = simulator_adapter.flaten_input_interface()
        prepared_flatten_interface = self.flow_service.prepare_parameters(flatten_interface, self.project.id,
                                                                          algo_group.fk_category)
        for entry in prepared_flatten_interface:
            value = entry['default']
            if isinstance(value, dict):
                value = str(value)
            if hasattr(value, 'tolist'):
                value = value.tolist()
            launch_args[entry['name']] = value
        launch_args.update(self.overwrites)
        
        # Estimate how many operations the parameter ranges will expand into.
        nr_of_operations = 1
        for key in self.overwrites:
            if key.startswith(PARAM_RANGE_PREFIX):
                range_values = self.operation_service.get_range_values(launch_args, key)
                nr_of_operations *= len(range_values)
        do_launch = False
        print "Warning! This will launch %s operations. Do you agree? (yes/no)" % nr_of_operations
        while True:
            accept = raw_input()
            if accept.lower() == 'yes':
                do_launch = True
                break
            if accept.lower() == 'no':
                do_launch = False
                break
            print "Please type either yes or no"
        
        if do_launch:
            self.launched_operations = self.flow_service.fire_operation(simulator_adapter, self.project.administrator,
                                                                        self.project.id, **launch_args)
            return self.validate_results(0)
        else:
            return "Operation canceled by user."


    def validate_results(self, last_verified_index):
        """ Poll the launched operations and report how many finished with errors. """
        error_count = 0
        while last_verified_index < len(self.launched_operations):
            operation_to_check = self.launched_operations[last_verified_index]
            operation = dao.get_operation_by_id(operation_to_check.id)
            if operation.status == STATUS_STARTED:
                # Still running: wait, then poll the same operation again.
                sleep(10)
            if operation.status == STATUS_ERROR:
                sys.stdout.write("E(" + str(operation_to_check.id) + ")")
                error_count += 1
                last_verified_index += 1
                sys.stdout.flush()
            if operation.status == STATUS_FINISHED:
                last_verified_index += 1
                sys.stdout.write('.')
                sys.stdout.flush()
        if error_count:
            return "%s operations in error; %s operations finished successfully." % (
                error_count, len(self.launched_operations) - error_count)
        return "All operations finished successfully!"
class SpatioTemporalController(BaseController):
    """
    Base class which contains methods related to spatio-temporal actions.
    """

    def __init__(self):
        BaseController.__init__(self)
        self.flow_service = FlowService()
        self.logger = get_logger(__name__)
        editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                                  subsection='regionstim', description='Create a new Stimulus on Region level'),
                             dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                                  subsection='surfacestim', description='Create a new Stimulus on Surface level')]
        self.submenu_list = editable_entities


    @expose_page
    @settings
    def index(self, **data):
        """
        Displays the main page for the spatio temporal section.
        """
        template_specification = {'title': "Spatio temporal", 'data': data, 'mainContent': 'header_menu'}
        return self.fill_default_attributes(template_specification)



    @staticmethod
    def display_surface(surface_gid):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        common.add2session(PARAM_SURFACE, surface_gid)
        url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
        url_vertices, url_normals, _, url_triangles = surface.get_urls_for_rendering()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(surface.center())
        }


    @staticmethod
    def prepare_entity_interface(input_list):
        """
        Prepares the input tree obtained from a creator.
        """
        return {'inputList': input_list,
                common.KEY_PARAMETERS_CONFIG: False}


    def get_creator_and_interface(self, creator_module, creator_class, datatype_instance, lock_midpoint_for_eq=None):
        """
        Returns a Tuple: a creator instance and a dictionary for the creator interface.
        The interface is prepared for rendering, it is populated with existent data, in case of a
        parameter of type DataType. The name of the attributes are also prefixed to identify groups.
        """
        algo_group = self.flow_service.get_algorithm_by_module_and_class(creator_module, creator_class)[1]
        group, _ = self.flow_service.prepare_adapter(common.get_current_project().id, algo_group)

        # The interface returned by 'prepare_adapter' above is not used here, because
        # the selects that display dataTypes would also contain the 'All' entry.
        datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
        input_list = datatype_instance.interface[traited_interface.INTERFACE_ATTRIBUTES]
        if lock_midpoint_for_eq is not None:
            for idx in lock_midpoint_for_eq:
                input_list[idx] = self._lock_midpoints(input_list[idx])
        category = self.flow_service.get_visualisers_category()
        input_list = self.flow_service.prepare_parameters(input_list, common.get_current_project().id, category.id)
        input_list = ABCAdapter.prepare_param_names(input_list)

        return self.flow_service.build_adapter_instance(group), input_list


    @staticmethod
    def get_series_json(data, label):
        """ For each data point entry, build the FLOT specific JSON. """
        return '{"data": %s, "label": "%s"}' % (json.dumps(data), label)


    @staticmethod
    def build_final_json(list_of_series):
        """ Given a list with all the data points, build the final FLOT json. """
        return '[' + ','.join(list_of_series) + ']'
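
    # Example (illustrative): build_final_json([get_series_json([[0, 1]], "a")])
    # returns '[{"data": [[0, 1]], "label": "a"}]', the format expected by FLOT.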


    @staticmethod
    def get_ui_message(list_of_equation_names):
        """
        The message returned by this method should be displayed if
        the equation with the given name couldn't be evaluated in all points.
        """
        if list_of_equation_names:
            return ("Could not evaluate the " + ", ".join(list_of_equation_names) + " equation(s) "
                    "in all the points. Some of the values were changed.")
        else:
            return ""


    def get_select_existent_entities(self, label, entity_type, entity_gid=None):
        """
        Returns the dictionary needed for drawing the select which displays all
        the created entities of the specified type.
        """
        project_id = common.get_current_project().id
        category = self.flow_service.get_visualisers_category()

        interface = [{'name': 'existentEntitiesSelect', 'label': label, 'type': entity_type}]
        if entity_gid is not None:
            interface[0]['default'] = entity_gid
        interface = self.flow_service.prepare_parameters(interface, project_id, category.id)
        interface = ABCAdapter.prepare_param_names(interface)

        return interface


    @staticmethod
    def add_interface_to_session(left_input_tree, right_input_tree):
        """
        left_input_tree and right_input_tree are expected to be lists of dictionaries.

        The two given lists are concatenated and added to the session.
        For the filters to work, the interface has to be in the session.
        """
        entire_tree = deepcopy(left_input_tree)
        entire_tree.extend(right_input_tree)
        SelectedAdapterContext().add_adapter_to_session(None, entire_tree)


    def fill_default_attributes(self, template_dictionary, subsection='stimulus'):
        """
        Overwrite base controller to add required parameters for adapter templates.
        """
        template_dictionary[common.KEY_SECTION] = 'stimulus'
        template_dictionary[common.KEY_SUB_SECTION] = subsection
        template_dictionary[common.KEY_SUBMENU_LIST] = self.submenu_list
        template_dictionary[common.KEY_INCLUDE_RESOURCES] = 'spatial/included_resources'
        BaseController.fill_default_attributes(self, template_dictionary)
        return template_dictionary


    def get_x_axis_range(self, min_x_str, max_x_str):
        """
        Fill range for the X-axis displayed in 2D graph.
        """
        try:
            min_x = int(min_x_str)
        except ValueError:
            return 0, 100, "The min value for the x-axis should be an integer value."

        try:
            max_x = int(max_x_str)
        except ValueError:
            return 0, 100, "The max value for the x-axis should be an integer value."

        if min_x >= max_x:
            return 0, 100, "The min value for the x-axis should be smaller then the max value of the x-axis."

        return min_x, max_x, ''
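
    # Examples (illustrative): get_x_axis_range('0', '100') returns (0, 100, '')
    # while get_x_axis_range('5', '2') falls back to (0, 100, <error message>).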


    @staticmethod
    def _lock_midpoints(equations_dict):
        """
        Set mid-points for Gaussian / DoubleGaussian spatial equations as locked (to 0.0).
        """
        for equation in equations_dict[ABCAdapter.KEY_OPTIONS]:
            if equation[ABCAdapter.KEY_NAME] == 'Gaussian':
                for entry in equation[ABCAdapter.KEY_ATTRIBUTES][1][ABCAdapter.KEY_ATTRIBUTES]:
                    if entry[ABCAdapter.KEY_NAME] == 'midpoint':
                        entry['locked'] = True
            if equation[ABCAdapter.KEY_NAME] == 'DoubleGaussian':
                for entry in equation[ABCAdapter.KEY_ATTRIBUTES][1][ABCAdapter.KEY_ATTRIBUTES]:
                    if entry[ABCAdapter.KEY_NAME] == 'midpoint1':
                        entry['locked'] = True
        return equations_dict
class FlowServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.flow_service module.
    """
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)
        ### Insert some starting data in the database.
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        categ2 = model.AlgorithmCategory('two', rawinput=True)
        self.categ2 = dao.store_entity(categ2)

        group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
        self.algo_group1 = dao.store_entity(group1)
        group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
        self.algo_group2 = dao.store_entity(group2)
        group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
        self.algo_group3 = dao.store_entity(group3)

        group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE,
                                       TEST_ADAPTER_VALID_CLASS, categ2.id)
        self.algo_group_v = dao.store_entity(group_v)

        algo_v = model.Algorithm(self.algo_group_v.id,
                                 'ident',
                                 name='',
                                 req_data='',
                                 param_name='',
                                 output='')
        self.algorithm_v = dao.store_entity(algo_v)

        algo1 = model.Algorithm(self.algo_group1.id,
                                'id',
                                name='',
                                req_data='',
                                param_name='',
                                output='')
        self.algorithm1 = dao.store_entity(algo1)

    def tearDown(self):
        for algo in [self.algorithm1, self.algorithm_v]:
            dao.remove_entity(model.Algorithm, algo.id)

        for group in [
                self.algo_group1, self.algo_group2, self.algo_group3,
                self.algo_group_v
        ]:

        for categ in [self.categ1, self.categ2]:
            dao.remove_entity(model.AlgorithmCategory, categ.id)

    def test_groups_for_categories(self):
        """
        Test getting algorithms for specific categories.
        """
        category1 = self.flow_service.get_groups_for_categories([self.categ1])
        category2 = self.flow_service.get_groups_for_categories([self.categ2])

        dummy = model.AlgorithmCategory('dummy', rawinput=True)
        dummy.id = 999
        unexisting_cat = self.flow_service.get_groups_for_categories([dummy])

        self.assertEqual(len(category1), 2)
        self.assertEqual(len(category2), 2)
        self.assertEqual(len(unexisting_cat), 0)

        for group in category1:
            if group.module not in ["test_module1", "test_module3"]:
                self.fail("Some invalid data retrieved")
        for group in category2:
            if group.module not in ["test_module2", TEST_ADAPTER_VALID_MODULE]:
                self.fail("Some invalid data retrieved")

    def test_get_group_by_identifier(self):
        """
        Test for get_algo_group_by_identifier.
        """
        algo_ret = self.flow_service.get_algo_group_by_identifier(
            self.algo_group1.id)
        self.assertEqual(algo_ret.id, self.algo_group1.id,
                         "IDs are different!")
        self.assertEqual(algo_ret.module, self.algo_group1.module,
                         "Modules are different!")
        self.assertEqual(algo_ret.fk_category, self.algo_group1.fk_category,
                         "Categories are different!")
        self.assertEqual(algo_ret.classname, self.algo_group1.classname,
                         "Class names are different!")

    def test_build_adapter_instance(self):
        """
        Test standard flow for building an adapter instance.
        """
        algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE,
                                    TEST_ADAPTER_VALID_CLASS)
        adapter = ABCAdapter.build_adapter(algo_group)
        self.assertTrue(isinstance(adapter, ABCSynchronous),
                        "Something went wrong with valid data!")

    def test_build_adapter_invalid(self):
        """
        Test flow for trying to build an adapter that does not inherit from ABCAdapter.
        """
        group = dao.find_group(TEST_ADAPTER_VALID_MODULE,
                               TEST_ADAPTER_INVALID_CLASS)
        self.assertRaises(OperationException,
                          self.flow_service.build_adapter_instance, group)

    def test_prepare_adapter(self):
        """
        Test preparation of an adapter.
        """
        algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE,
                                    TEST_ADAPTER_VALID_CLASS)
        group, interface = self.flow_service.prepare_adapter(
            self.test_project.id, algo_group)
        self.assertTrue(isinstance(group, model.AlgorithmGroup),
                        "Something went wrong with valid data!")
        self.assertTrue("name" in interface[0], "Bad interface created!")
        self.assertEquals(interface[0]["name"], "test", "Bad interface!")
        self.assertTrue("type" in interface[0], "Bad interface created!")
        self.assertEquals(interface[0]["type"], "int", "Bad interface!")
        self.assertTrue("default" in interface[0], "Bad interface created!")
        self.assertEquals(interface[0]["default"], "0", "Bad interface!")

    def test_fire_operation(self):
        """
        Test preparation of an adapter and launch mechanism.
        """
        algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE,
                                    TEST_ADAPTER_VALID_CLASS)
        adapter = self.flow_service.build_adapter_instance(algo_group)
        data = {"test": 5}
        result = self.flow_service.fire_operation(adapter, self.test_user,
                                                  self.test_project.id, **data)
        self.assertTrue(result.endswith("has finished."), "Operation failed")

    def test_get_filtered_by_column(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        after a column from a class specific table (e.g. DATA_arraywrapper).
        """
        operation_1 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        operation_2 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)

        one_dim_array = numpy.arange(5)
        two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
        self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
        self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
        self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 3, "Problems with inserting data")
        first_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[1])
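        # This filter keeps only dataTypes whose _nr_dimensions column equals 1,
        # i.e. the two one-dimensional arrays stored above.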
        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            first_filter)[1]
        self.assertEqual(count, 2, "Data was not filtered")

        second_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[2])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            second_filter)[0]
        self.assertEqual(len(filtered_data), 1, "Data was not filtered")
        self.assertEqual(filtered_data[0][3], "John Doe 3")

        third_filter = FilterChain(
            fields=[FilterChain.datatype + '._length_1d'],
            operations=["=="],
            values=[3])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            third_filter)[0]
        self.assertEqual(len(filtered_data), 1,
                         "Data was not filtered correctly")
        self.assertEqual(filtered_data[0][3], "John Doe 3")
        # Clean up leftover test files, if any; ignore errors.
        try:
            if os.path.exists('One_dim.txt'):
                os.remove('One_dim.txt')
            if os.path.exists('Two_dim.txt'):
                os.remove('Two_dim.txt')
            if os.path.exists('One_dim-1.txt'):
                os.remove('One_dim-1.txt')
        except Exception:
            pass

    @staticmethod
    def _store_float_array(array_data, subject_name, operation_id):
        """Create Float Array and DB persist it"""
        datatype_inst = MappedArray(user_tag_1=subject_name)
        datatype_inst.set_operation_id(operation_id)
        datatype_inst.array_data = array_data
        datatype_inst.type = "MappedArray"
        datatype_inst.module = "tvb.datatypes.arrays"
        datatype_inst.subject = subject_name
        datatype_inst.state = "RAW"
        dao.store_entity(datatype_inst)

    def test_get_filtered_datatypes(self):
        """
        Test the filter function when retrieving dataTypes.
        """
        #Create some test operations
        start_dates = [
            datetime.now(),
            datetime.strptime("08-06-2010", "%m-%d-%Y"),
            datetime.strptime("07-21-2010", "%m-%d-%Y"),
            datetime.strptime("05-06-2010", "%m-%d-%Y"),
            datetime.strptime("07-21-2011", "%m-%d-%Y")
        ]
        end_dates = [
            datetime.now(),
            datetime.strptime("08-12-2010", "%m-%d-%Y"),
            datetime.strptime("08-12-2010", "%m-%d-%Y"),
            datetime.strptime("08-12-2011", "%m-%d-%Y"),
            datetime.strptime("08-12-2011", "%m-%d-%Y")
        ]
        for i in range(5):
            operation = model.Operation(self.test_user.id,
                                        self.test_project.id,
                                        self.algorithm1.id,
                                        'test params',
                                        status=model.STATUS_FINISHED,
                                        start_date=start_dates[i],
                                        completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(
                self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = Datatype1()
                datatype_inst.type = "Datatype1"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = Datatype2()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "Datatype2"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.set_operation_id(operation.id)
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(
            self.test_project.id, Datatype1)[0]
        for row in returned_data:
            if row[1] != 'Datatype1':
                self.fail("Some invalid data was returned!")
        self.assertEqual(4, len(returned_data), "Invalid length of result")

        filter_op = FilterChain(
            fields=[
                FilterChain.datatype + ".state",
                FilterChain.operation + ".start_date"
            ],
            values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")],
            operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(
            self.test_project.id, Datatype1, filter_op)[0]
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(
                returned_subjects) != 2:
            self.fail("DataTypes were not filtered properly!")
class ProjectStructureTest(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)
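        # Assumption about filter semantics: RELEVANT_VIEW is expected to hide
        # dataTypes marked as not visible, while FULL_VIEW returns everything.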

    def tearDown(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()

    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithmn()
        op1 = model.Operation(self.test_user.id, self.test_project.id,
                              self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        self.assertTrue(op1.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        self.assertFalse(updated_op.visible,
                         "The operation should not be visible.")

    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        _, group_id = TestFactory.create_group(self.test_user,
                                               subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible,
                            "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(
            list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible,
                             "The operation should not be visible.")

    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user,
                                               subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible,
                            "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(
            op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible,
                             "The operation should not be visible.")

    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id,
                              self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id,
                              upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(
            operations[0].gid)
        self.assertFalse(is_upload_operation,
                         "The operation should not be an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(
            operations[1].gid)
        self.assertTrue(is_upload_operation,
                        "The operation should be an upload operation.")

    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id,
                              self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id,
                              project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id,
                              upload_algo.id, "")
        op4 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        self.assertEqual(2, len(upload_operations),
                         "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:
            self.assertFalse(
                operations[i].id in upload_ids,
                "The operation should not be an upload operation.")

    def test_is_datatype_group(self):
        """
        Tests whether a datatype is a datatype group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        self.assertTrue(is_dt_group,
                        "The datatype should be a datatype group.")
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        self.assertFalse(is_dt_group,
                         "The datatype should not be a datatype group.")

    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow."""
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        self.assertEqual(count, 2)
        count = dao.count_datatypes_in_group(first_dt.id)
        self.assertEqual(count, 0, "There should be no dataType.")

    def test_set_datatype_visibility(self):
        """
        Check if the visibility for a datatype is set correctly.
        """
        # it's a list of 3 elements.
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            self.assertTrue(is_visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(mapped_arrays[0][2],
                                                     False)
        for i in xrange(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                self.assertFalse(is_visible,
                                 "The data type should not be visible.")
            else:
                self.assertTrue(is_visible, "The data type should be visible.")

    def test_set_visibility_for_dt_in_group(self):
        """
        Check if the visibility for a datatype from a datatype group is set
        correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(first_dt.gid, False)
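        # Hiding a single datatype from a group is expected to cascade to the
        # whole group, so the group and both members should end up hidden.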

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(db_dt_group.visible,
                         "The data type group should not be visible.")
        self.assertFalse(db_first_dt.visible,
                         "The data type should not be visible.")
        self.assertFalse(db_second_dt.visible,
                         "The data type should not be visible.")

    def test_set_visibility_for_group(self):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.assertTrue(dt_group.visible,
                        "The data type group should be visible.")
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(dt_group.gid, False)
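        # Hiding the group through its gid should cascade to both member
        # datatypes as well.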

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(
            second_dt.id)

        self.assertFalse(updated_dt_group.visible,
                         "The data type group should not be visible.")
        self.assertFalse(updated_first_dt.visible,
                         "The data type should not be visible.")
        self.assertFalse(updated_second_dt.visible,
                         "The data type should not be visible.")

    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(
            dt_group_id)
        self.assertEqual(
            len(datatypes), 2,
            "There should be 2 datatypes in the datatype group.")
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {
            datatypes[0].id: datatypes[0],
            datatypes[1].id: datatypes[1]
        }

        for key in expected_dict.keys():
            expected = expected_dict[key]
            actual = actual_dict[key]
            self.assertEqual(expected.id, actual.id, "Not the same id.")
            self.assertEqual(expected.gid, actual.gid, "Not the same gid.")
            self.assertEqual(expected.type, actual.type, "Not the same type.")
            self.assertEqual(expected.subject, actual.subject,
                             "Not the same subject.")
            self.assertEqual(expected.state, actual.state,
                             "Not the same state.")
            self.assertEqual(expected.visible, actual.visible,
                             "The datatype visibility is not correct.")
            self.assertEqual(expected.module, actual.module,
                             "Not the same module.")
            self.assertEqual(expected.user_tag_1, actual.user_tag_1,
                             "Not the same user_tag_1.")
            self.assertEqual(expected.invalid, actual.invalid,
                             "The invalid field value is not correct.")
            self.assertEqual(expected.is_nan, actual.is_nan,
                             "The is_nan field value is not correct.")

    def test_get_operations_for_dt(self):
        """
        Tests method get_operations_for_datatype.
        Verifies that the expected operations are returned for each filter.
        """
        created_ops, datatype_gid = self._create_operations_with_inputs()
        operations = self.project_service.get_operations_for_datatype(
            datatype_gid, self.relevant_filter)
        self.assertEqual(len(operations), 2)
        self.assertTrue(
            created_ops[0].id in [operations[0].id, operations[1].id],
            "Retrieved wrong operations.")
        self.assertTrue(
            created_ops[2].id in [operations[0].id, operations[1].id],
            "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(
            datatype_gid, self.full_filter)
        self.assertEqual(len(operations), 4)
        ids = [
            operations[0].id, operations[1].id, operations[2].id,
            operations[3].id
        ]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids,
                            "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(
            datatype_gid, self.relevant_filter, True)
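        # With the third argument set, only operations belonging to an
        # operation group are returned; the helper created ops[4] and ops[5]
        # inside such a group.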
        self.assertEqual(len(operations), 1)
        self.assertEqual(created_ops[4].id, operations[0].id,
                         "Retrieved wrong operation.")

        operations = self.project_service.get_operations_for_datatype(
            datatype_gid, self.full_filter, True)
        self.assertEqual(len(operations), 2)
        self.assertTrue(
            created_ops[4].id in [operations[0].id, operations[1].id],
            "Retrieved wrong operations.")
        self.assertTrue(
            created_ops[5].id in [operations[0].id, operations[1].id],
            "Retrieved wrong operations.")

    def test_get_operations_for_dt_group(self):
        """
        Tests method get_operations_for_datatype_group.
        Verifies filters' influence over results is as expected
        """
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(
            dt_group_id, self.relevant_filter)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[0].id in [ops[0].id, ops[1].id],
                        "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [ops[0].id, ops[1].id],
                        "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(
            dt_group_id, self.full_filter)
        self.assertEqual(len(ops), 4, "Incorrect number of operations.")
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids,
                            "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(
            dt_group_id, self.relevant_filter, True)
        self.assertEqual(len(ops), 1)
        self.assertEqual(created_ops[4].id, ops[0].id,
                         "Retrieved wrong operation.")

        ops = self.project_service.get_operations_for_datatype_group(
            dt_group_id, self.full_filter, True)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[4].id in [ops[0].id, ops[1].id],
                        "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [ops[0].id, ops[1].id],
                        "Retrieved wrong operations.")

    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo_group = dao.find_group(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({
            "param_5": "1",
            "param_1": array_wrappers[0][2],
            "param_2": array_wrappers[1][2],
            "param_3": array_wrappers[2][2],
            "param_6": "0"
        })
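        # The operation references all three arrays through its parameters;
        # ids[0] was hidden above, so the relevant filter should exclude it.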
        operation = model.Operation(self.test_user.id, self.test_project.id,
                                    algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id],
                         "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id,
                                    parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")

    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = \
            self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({
            "param_5": "1",
            "param_1": first_dt.gid,
            "param_6": "2"
        })
        params_2 = json.dumps({
            "param_5": "1",
            "param_4": second_dt.gid,
            "param_6": "5"
        })

        algo_group = dao.find_group(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id,
                              project.id,
                              algo.id,
                              params_1,
                              op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id,
                              project.id,
                              algo.id,
                              params_2,
                              op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
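        # Inputs coming from a datatype group are reported as the group
        # itself, never as its individual member datatypes.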
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id,
                        "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id,
                        "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id,
                         "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id,
                        "Retrieved wrong dataType.")

    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group",
                                        "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({
            "param_5": "2",
            "param_1": array_wrappers[0][2],
            "param_2": array_wrappers[1][2],
            "param_6": "7"
        })
        params_2 = json.dumps({
            "param_5": "5",
            "param_3": array_wrappers[2][2],
            "param_2": array_wrappers[1][2],
            "param_6": "6"
        })

        algo_group = dao.find_group(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              algo.id,
                              params_1,
                              op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              algo.id,
                              params_2,
                              op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
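        # array_wrappers[1] is an input of both operations but is reported
        # only once; array_wrappers[0] is excluded from the relevant view
        # because it was hidden above.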
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id],
                         "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(
            array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(
            array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(
            array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])

    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id,
                                             dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])

    def test_remove_datatype_from_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = \
            self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup,
                                                dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id,
                                           datatype_group.fk_operation_group)

    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = \
            self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup,
                                                dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id,
                                           datatype_group.fk_operation_group)

    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)

        algo_group = dao.find_group(
            'tvb.tests.framework.adapters.ndimensionarrayadapter',
            'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)

        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        count = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)

        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        count = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)

        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers

    def _create_operation(self, project_id, algorithm_id):
        """
        Creates a dummy operation.
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        operation = model.Operation(self.test_user.id,
                                    project_id,
                                    algorithm.id,
                                    'test params',
                                    meta=json.dumps(meta),
                                    status=model.STATUS_FINISHED)
        return dao.store_entity(operation)

    def _create_datatype_group(self):
        """
        Creates a project and a DataTypeGroup containing 2 DataTypes.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id,
                                                     None,
                                                     is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")

        datatypes, op_group_id = TestFactory.create_group(
            self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]

    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True, a new group will be created and one of
        its entries will be used as input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(
            self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(
                self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(
                TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])

        #groups
        _, ops_group = TestFactory.create_group(self.test_user,
                                                self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        self.assertEqual(2, len(ops_group))
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid

    def _check_if_datatype_was_removed(self, datatype):
        """
        Check that a certain datatype was removed.
        """
        # The fail() calls must sit in the `else` branches: raised inside the
        # `try`, the resulting AssertionError would itself be swallowed by
        # `except Exception` and the check could never fail.
        try:
            dao.get_datatype_by_id(datatype.id)
        except Exception:
            pass
        else:
            self.fail("The datatype was not deleted.")
        try:
            dao.get_operation_by_id(datatype.fk_from_operation)
        except Exception:
            pass
        else:
            self.fail("The operation was not deleted.")

    def _check_datatype_group_removed(self, datatype_group_id,
                                      operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        # get_generic_entity returns a list (it is indexed with [0] in the
        # tests above), so an empty result, rather than an exception, signals
        # that the group entity is gone.
        datatype_groups = dao.get_generic_entity(model.DataTypeGroup,
                                                 datatype_group_id)
        self.assertEqual(0, len(datatype_groups),
                         "The DataTypeGroup entity was not removed.")

        try:
            dao.get_operationgroup_by_id(operation_group_id)
        except Exception:
            pass
        else:
            self.fail("The OperationGroup entity was not removed.")

    def __init_algorithmn(self):
        """
        Insert some starting data in the database.
        """
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        algo = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE,
                                    TEST_ADAPTER_VALID_CLASS, categ1.id)
        adapter = dao.store_entity(algo)
        algo = model.Algorithm(adapter.id,
                               'ident',
                               name='',
                               req_data='',
                               param_name='',
                               output='')
        self.algo_inst = dao.store_entity(algo)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(
            model.AlgorithmCategory("upload_category", rawinput=True))
        algo_group = dao.store_entity(
            model.AlgorithmGroup("module", "classname", category.id))
        return dao.store_entity(model.Algorithm(algo_group.id, "algo"))
class ProjectStructureTest(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """

    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)

    
    def tearDown(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()


    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correct.
        """
        self.__init_algorithmn()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        self.assertTrue(op1.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        self.assertFalse(updated_op.visible, "The operation should not be visible.")


    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correct.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        self.assertFalse(is_upload_operation, "The operation is not an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        self.assertTrue(is_upload_operation, "The operation is an upload operation.")


    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        self.assertEqual(2, len(upload_operations), "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:                    
            self.assertFalse(operations[i].id in upload_ids, 
                             "The operation should not be an upload operation.")


    def test_is_datatype_group(self):
        """
        Tests if a datatype is group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        self.assertTrue(is_dt_group, "The datatype should be a datatype group.")
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        self.assertFalse(is_dt_group, "The datatype should not be a datatype group.")


    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow."""
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        self.assertEqual(count, 2)
        count = dao.count_datatypes_in_group(first_dt.id)
        self.assertEqual(count, 0, "There should be no dataType.")


    def test_set_datatype_visibility(self):
        """
        Check if the visibility for a datatype is set correct.
        """
        #it's a list of 3 elem.
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            self.assertTrue(is_visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(mapped_arrays[0][2], False)
        for i in xrange(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                self.assertFalse(is_visible, "The data type should not be visible.")
            else:
                self.assertTrue(is_visible, "The data type should be visible.")


    def test_set_visibility_for_dt_in_group(self):
        """
        Check if the visibility for a datatype from a datatype group is set correct.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(first_dt.gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(db_dt_group.visible, "The data type should be visible.")
        self.assertFalse(db_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(db_second_dt.visible, "The data type should be visible.")


    def test_set_visibility_for_group(self):
        """
        Check if the visibility for a datatype group is set correct.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.assertTrue(dt_group.visible, "The data type group should be visible.")
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(updated_dt_group.visible, "The data type group should be visible.")
        self.assertFalse(updated_first_dt.visible, "The data type should be visible.")
        self.assertFalse(updated_second_dt.visible, "The data type should be visible.")


    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(dt_group_id)
        self.assertEqual(len(datatypes), 2, "There should be 2 datatypes into the datatype group.")
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}

        for key in expected_dict.keys():
            expected = expected_dict[key]
            actual = actual_dict[key]
            self.assertEqual(expected.id, actual.id, "Not the same id.")
            self.assertEqual(expected.gid, actual.gid, "Not the same gid.")
            self.assertEqual(expected.type, actual.type, "Not the same type.")
            self.assertEqual(expected.subject, actual.subject, "Not the same subject.")
            self.assertEqual(expected.state, actual.state, "Not the same state.")
            self.assertEqual(expected.visible, actual.visible, "The datatype visibility is not correct.")
            self.assertEqual(expected.module, actual.module, "Not the same module.")
            self.assertEqual(expected.user_tag_1, actual.user_tag_1, "Not the same user_tag_1.")
            self.assertEqual(expected.invalid, actual.invalid, "The invalid field value is not correct.")
            self.assertEqual(expected.is_nan, actual.is_nan, "The is_nan field value is not correct.")


    def test_get_operations_for_dt(self):
        """
        Tests method get_operations_for_datatype.
        Verifies result dictionary has the correct values
        """
        created_ops, datatype_gid = self._create_operations_with_inputs()
        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[0].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter)
        self.assertEqual(len(operations), 4)
        ids = [operations[0].id, operations[1].id, operations[2].id, operations[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter, True)
        self.assertEqual(len(operations), 1)
        self.assertEqual(created_ops[4].id, operations[0].id, "Incorrect number of operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter, True)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[4].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")


    def test_get_operations_for_dt_group(self):
        """
        Tests method get_operations_for_datatype_group.
        Verifies filters' influence over results is as expected
        """
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[0].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter)
        self.assertEqual(len(ops), 4, "Incorrect number of operations.")
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter, True)
        self.assertEqual(len(ops), 1)
        self.assertEqual(created_ops[4].id, ops[0].id, "Incorrect number of operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter, True)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[4].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")


    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of operations.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")


    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")


    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        #it's a list of 3 elem.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])


    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        #it's a list of 3 elem.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id, dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])


    def test_remove_datatype_from_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)

        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers


    def _create_operation(self, project_id, algorithm_id):
        """
        dummy operation
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                                    meta=json.dumps(meta), status=model.STATUS_FINISHED)
        return dao.store_entity(operation)


    def _create_datatype_group(self):
        """
        Creates a project, one DataTypeGroup with 2 DataTypes into the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")
        
        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]



    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'if_group_parent' is True then a new group will be created and one of its entries it will be used as
        input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])
            
        #groups
        _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        self.assertEqual(2, len(ops_group))
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid


    def _check_if_datatype_was_removed(self, datatype):
        """
        Check if a certain datatype was removed.
        """
        try:
            dao.get_datatype_by_id(datatype.id)
            self.fail("The datatype was not deleted.")
        except Exception:
            pass
        try:
            dao.get_operation_by_id(datatype.fk_from_operation)
            self.fail("The operation was not deleted.")
        except Exception:
            pass


    def _check_datatype_group_removed(self, datatype_group_id, operation_groupp_id):
        """
        Checks if the DataTypeGroup and OperationGroup was removed.
        """
        try:
            dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
            self.fail("The DataTypeGroup entity was not removed.")
        except Exception:
            pass

        try:
            dao.get_operationgroup_by_id(operation_groupp_id)
            self.fail("The OperationGroup entity was not removed.")
        except Exception:
            pass


    def __init_algorithm(self):
        """
        Insert some starting data in the database.
        """
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        algo = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ1.id)
        adapter = dao.store_entity(algo)
        algo = model.Algorithm(adapter.id, 'ident', name='', req_data='', param_name='', output='')
        self.algo_inst = dao.store_entity(algo)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(model.AlgorithmCategory("upload_category", rawinput=True))
        algo_group = dao.store_entity(model.AlgorithmGroup("module", "classname", category.id))
        return dao.store_entity(model.Algorithm(algo_group.id, "algo"))
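
    # Note: the category above is created with rawinput=True, which marks it as
    # an upload category, so tests can exercise upload-specific code paths.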
Exemple #11
class SerializationManager(object):
    """
    Constructs data types based on a burst configuration.
    Updates the burst configuration.
    """
    def __init__(self, conf):
        """
        :param conf: burst configuration entity
        """
        self.logger = get_logger(__name__)
        self.flow_service = FlowService()
        self.conf = conf


    def _build_simulator_adapter(self):
        _, group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        return self.flow_service.build_adapter_instance(group)


    def has_model_pse_ranges(self):
        """ Returns True if the burst configuration describes a range on a model parameter """
        first_range = self.conf.get_simulation_parameter_value(RANGE_PARAMETER_1)
        second_range = self.conf.get_simulation_parameter_value(RANGE_PARAMETER_2)
        first_range_on = first_range is not None and str(first_range).startswith(MODEL_PARAMETERS)
        second_range_on = second_range is not None and str(second_range).startswith(MODEL_PARAMETERS)
        return first_range_on or second_range_on


    def _get_params_dict(self):
        """ Convert ui inputs from the configuration to python types """
        simulator_adapter = self._build_simulator_adapter()
        return simulator_adapter.convert_ui_inputs(self.conf.get_all_simulator_values()[0], False)


    def __make_instance_from_burst_config(self, params_dict, parent_class, class_name_key, params_key):
        """ This is used internally to create a model or an integrator based on the burst config """
        class_name = self.conf.get_simulation_parameter_value(class_name_key)
        parameters = params_dict[params_key]
        noise_framework.build_noise(parameters)
        try:
            return get_traited_instance_for_name(class_name, parent_class, parameters)
        except Exception:
            self.logger.exception("Could not create an instance of %s with the given parameters. "
                                  "A new instance will be created with the default values." % class_name)
            return get_traited_instance_for_name(class_name, parent_class, {})


    def __make_shallow_model(self):
        """ Creates a model of the type present in the config without setting any parameters on it """
        class_name = self.conf.get_simulation_parameter_value(PARAM_MODEL)
        return get_traited_instance_for_name(class_name, Model, {})


    def make_model_and_integrator(self):
        """
        :return: A model and an integrator.
        :rtype: Model, Integrator
        """
        params_dict = self._get_params_dict()
        model = self.__make_instance_from_burst_config(params_dict, Model, PARAM_MODEL, MODEL_PARAMETERS)
        integrator = self.__make_instance_from_burst_config(params_dict, Integrator,
                                                            PARAM_INTEGRATOR, INTEGRATOR_PARAMETERS)
        return model, integrator
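
    # A minimal usage sketch (hypothetical; assumes `burst_config` is a loaded
    # burst configuration entity):
    #     manager = SerializationManager(burst_config)
    #     model, integrator = manager.make_model_and_integrator()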


    def get_connectivity(self):
        """ Prepare Connectivity """
        connectivity_gid = self.conf.get_simulation_parameter_value(PARAM_CONNECTIVITY)
        return ABCAdapter.load_entity_by_gid(connectivity_gid)


    def get_surface(self):
        """ Prepare Surface """
        surface_gid = self.conf.get_simulation_parameter_value(PARAM_SURFACE)
        if surface_gid:
            return ABCAdapter.load_entity_by_gid(surface_gid)
        return None

    @staticmethod
    def group_parameter_values_by_name(model_parameters_list):
        """
        :param model_parameters_list: A list of model parameter dicts, e.g.
                [{'a': 2.0, 'b': 1.0},
                 {'a': 3.0, 'b': 7.0}]

        :return: The same values grouped by parameter name, e.g.
                {'a': [2.0, 3.0], 'b': [1.0, 7.0]}
        """
        ret = {}
        for model_parameters in model_parameters_list:
            for param_name, param_val in model_parameters.iteritems():
                if param_name not in ret:
                    ret[param_name] = []
                ret[param_name].append(param_val)
        return ret


    def write_model_parameters(self, model_name, model_parameters_list):
        """
        Update model parameters in burst config.

        :param model_name: This model will be selected in burst
        :param model_parameters_list: A list of model parameter configurations. One for each connectivity node.
                Ex. [{'a': 1, 'b': 2}, ...]
        """


        def format_param_vals(vals):
            # contract constant array
            if len(set(vals)) == 1:
                vals = [vals[0]]
            return str(vals)

        model_parameters = self.group_parameter_values_by_name(model_parameters_list)
        # change selected model in burst config
        self.conf.update_simulation_parameter(PARAM_MODEL, model_name)

        for param_name, param_vals in model_parameters.iteritems():
            full_name = PARAMS_MODEL_PATTERN % (model_name, param_name)
            self.conf.update_simulation_parameter(full_name, format_param_vals(param_vals))
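
    # Worked example for format_param_vals above: a constant series such as
    # [1.0, 1.0, 1.0] contracts to "[1.0]", while a varying series such as
    # [1.0, 2.0] stays "[1.0, 2.0]".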


    def write_noise_parameters(self, noise_dispersions):
        """
        Set noise dispersions in burst config.
        It will set all nsig fields it can find in the config (at least 1 per stochastic integrator).
        :param noise_dispersions: A list of noise dispersions, one for each connectivity node, e.g. [{'V': 1, 'W': 2}, ...]
        """
        noise_dispersions = self.group_parameter_values_by_name(noise_dispersions)
        # Flatten the dict to an array of shape (state_vars, nodes)
        state_vars = self.__make_shallow_model().state_variables
        noise_arr = [noise_dispersions[sv] for sv in state_vars]

        simulator_adapter = self._build_simulator_adapter()
        for param_name in simulator_adapter.noise_configurable_parameters():
            self.conf.update_simulation_parameter(param_name, str(noise_arr))
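
    # Worked example for the flattening above (hypothetical values): for state
    # variables ['V', 'W'] and dispersions [{'V': 1, 'W': 2}, {'V': 3, 'W': 4}],
    # noise_arr becomes [[1, 3], [2, 4]]: one row per state variable and one
    # column per node.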


class ModelValidator(object):
    overwrites = {}


    def __init__(self, overwrites=None, settings_file=None):
        """ Parameters can be overwritten either from a settigns file or from a dictionary. """
        # Copy the class-level defaults so instances do not share one dict.
        self.overwrites = dict(self.overwrites)
        if overwrites is not None:
            self.overwrites.update(overwrites)
        if settings_file is not None:
            settings = open(settings_file).read()
            for line in settings.split('\n'):
                if not line.strip():
                    continue
                key, value = line.split('=')
                self.overwrites[key.strip()] = value.strip()
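        # Example settings file content (hypothetical; one key=value pair per
        # line, the project id key being whatever KEY_PROJECT holds):
        #     project=1
        #     simulation_length=100.0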
        if KEY_PROJECT not in self.overwrites:
            raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
        self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
        self.flow_service = FlowService()
        self.operation_service = OperationService()


    def launch_validation(self):
        """
        Prepare the arguments to be submitted and launch the actual group of operations.
        TODO: retrieve the results and check them for errors.
        """
        _, algo_group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        simulator_adapter = self.flow_service.build_adapter_instance(algo_group)
        launch_args = {}
        flatten_interface = simulator_adapter.flaten_input_interface()
        prepared_flatten_interface = self.flow_service.prepare_parameters(flatten_interface, self.project.id,
                                                                          algo_group.fk_category)
        for entry in prepared_flatten_interface:
            value = entry['default']
            if isinstance(value, dict):
                value = str(value)
            if hasattr(value, 'tolist'):
                value = value.tolist()
            launch_args[entry['name']] = value
        launch_args.update(self.overwrites)
        
        nr_of_operations = 1
        for key in self.overwrites:
            if key.startswith(PARAM_RANGE_PREFIX):
                range_values = self.operation_service.get_range_values(launch_args, key)
                nr_of_operations *= len(range_values)
        do_launch = False
        print "Warning! This will launch %s operations. Do you agree? (yes/no)" % nr_of_operations
        while True:
            accept = raw_input()
            if accept.lower() == 'yes':
                do_launch = True
                break
            if accept.lower() == 'no':
                do_launch = False
                break
            print "Please type either yes or no"
        
        if do_launch:
            self.launched_operations = self.flow_service.fire_operation(simulator_adapter, self.project.administrator,
                                                                        self.project.id, **launch_args)
            return self.validate_results(0)
        else:
            return "Operation canceled by user."


    def validate_results(self, last_verified_index):
        """ Poll the launched operations until all have finished, counting failures. """
        error_count = 0
        while last_verified_index < len(self.launched_operations):
            operation_to_check = self.launched_operations[last_verified_index]
            operation = dao.get_operation_by_id(operation_to_check.id)
            if not operation.has_finished:
                sleep(10)
                continue
            if operation.status == STATUS_ERROR:
                sys.stdout.write("E(" + str(operation_to_check.id) + ")")
                error_count += 1
                last_verified_index += 1
                sys.stdout.flush()
            if operation.status == STATUS_FINISHED:
                last_verified_index += 1
                sys.stdout.write('.')
                sys.stdout.flush()
        if error_count:
            return "%s operations finished with errors; %s operations finished successfully." % \
                   (error_count, len(self.launched_operations) - error_count)
        return "All operations finished successfully!"