Example #1
    def test_get_inputs_for_group(self, datatype_group_factory,
                                  test_adapter_factory):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        conn = TestFactory.import_zip_connectivity(self.test_user,
                                                   self.test_project, zip_path)
        conn.visible = False
        dao.store_entity(conn)

        group = OperationGroup(self.test_project.id, "group", "range1[1..2]")
        group = dao.store_entity(group)

        view_model = BaseBCTModel()
        view_model.connectivity = conn.gid
        adapter = ABCAdapter.build_adapter_from_class(
            TransitivityBinaryDirected)
        algorithm = adapter.stored_adapter

        operation1 = Operation(self.test_user.id,
                               self.test_project.id,
                               algorithm.id,
                               json.dumps({'gid': view_model.gid.hex}),
                               op_group_id=group.id)
        operation2 = Operation(self.test_user.id,
                               self.test_project.id,
                               algorithm.id,
                               json.dumps({'gid': view_model.gid.hex}),
                               op_group_id=group.id)
        dao.store_entities([operation1, operation2])

        OperationService()._store_view_model(
            operation1, dao.get_project_by_id(self.test_project.id),
            view_model)
        OperationService()._store_view_model(
            operation2, dao.get_project_by_id(self.test_project.id),
            view_model)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert conn.id == inputs[0].id, "Retrieved wrong dataType."

        conn.visible = True
        dao.store_entity(conn)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.relevant_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert conn.id == inputs[0].id, "Retrieved wrong dataType."
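
The four queries above exercise one contract: a "relevant" filter hides DataTypes whose visible flag is False, while the "full" filter returns them regardless. A minimal sketch of that contract in plain Python (FakeDataType and get_inputs are illustrative stand-ins, not TVB classes):

class FakeDataType:
    def __init__(self, dt_id, visible):
        self.id = dt_id
        self.visible = visible

def get_inputs(datatypes, only_relevant):
    # A "relevant" filter hides invisible datatypes; a "full" filter keeps all.
    return [dt for dt in datatypes if dt.visible or not only_relevant]

conn = FakeDataType(1, visible=False)
assert get_inputs([conn], only_relevant=True) == []
assert len(get_inputs([conn], only_relevant=False)) == 1
conn.visible = True
assert len(get_inputs([conn], only_relevant=True)) == 1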
Example #2
def update():
    """
    Move images previously stored in TVB operation folders into a single folder per project.
    """
    projects_count = dao.get_all_projects(is_count=True)

    for page_start in range(0, projects_count, PAGE_SIZE):

        projects_page = dao.get_all_projects(page_start=page_start,
                                             page_end=min(page_start + PAGE_SIZE, projects_count))

        for project in projects_page:
            figures = _figures_in_project(project.id)

            for figure in figures:
                figure.file_path = "%s-%s" % (figure.operation.id, figure.file_path)

            dao.store_entities(figures)

            project_path = FilesHelper().get_project_folder(project)
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()

            project.version = TvbProfile.current.version.PROJECT_VERSION
            dao.store_entity(project)
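
update() walks projects page by page rather than loading them all at once; the min() clamp keeps the final, partial page inside bounds. A self-contained sketch of just the paging arithmetic (iter_pages is illustrative, not TVB code):

PAGE_SIZE = 20

def iter_pages(total_count):
    # Walk [0, total_count) in PAGE_SIZE chunks; min() clamps the last page.
    for page_start in range(0, total_count, PAGE_SIZE):
        yield page_start, min(page_start + PAGE_SIZE, total_count)

assert list(iter_pages(45)) == [(0, 20), (20, 40), (40, 45)]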
Example #3
def update():
    """
    Move images previously stored in TVB operation folders into a single folder per project.
    """
    projects_count = dao.get_all_projects(is_count=True)

    for page_start in range(0, projects_count, PAGE_SIZE):

        projects_page = dao.get_all_projects(page_start=page_start,
                                             page_size=PAGE_SIZE)

        for project in projects_page:
            figures = _figures_in_project(project.id)

            for figure in figures:
                figure.file_path = "%s-%s" % (figure.operation.id,
                                              figure.file_path)

            dao.store_entities(figures)

            project_path = FilesHelper().get_project_folder(project)
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()

            project.version = TvbProfile.current.version.PROJECT_VERSION
            dao.store_entity(project)
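
This variant is identical to Example #2 except for the paging keyword: it passes page_size=PAGE_SIZE instead of computing an explicit page_end, suggesting the dao.get_all_projects signature changed between TVB releases.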
Example #4
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)

    ## Initialize DB
    is_db_empty = initialize_startup()

    ## Create Projects storage root in case it does not exist.
    initialize_storage()

    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)

    # Now remove or mark as removed any unverified Algo-Group, Algo-Category or Portlet
    to_invalidate, to_remove = dao.get_non_validated_entities(
        start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)
    for entity in to_remove:
        dao.remove_entity(entity.__class__, entity.id)

    ## Populate events
    if load_xml_events:
        read_events(event_folders)

    if not TvbProfile.is_first_run():
        ## Create default users.
        if is_db_empty:
            dao.store_entity(
                model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None,
                           None, True, None))
            UserService().create_user(
                username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                role=model.ROLE_ADMINISTRATOR)

        ## In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        ## In case the H5 version changed, run updates on all DataTypes
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(
                target=FilesUpdateManager().run_all_updates)
            thread.start()

        ## Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
Example #5
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)
    
    ## Initialize DB
    is_db_empty = initialize_startup()
    
    ## Create Projects storage root in case it does not exist.
    initialize_storage()
    
    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)

    # Now remove or mark as removed any unverified Algo-Group, Algo-Category or Portlet
    to_invalidate, to_remove = dao.get_non_validated_entities(start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)
    for entity in to_remove:
        dao.remove_entity(entity.__class__, entity.id)
   
    ## Populate events
    if load_xml_events:
        read_events(event_folders)

    if not TvbProfile.is_first_run():
        ## Create default users.
        if is_db_empty:
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
            UserService().create_user(username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                                      password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                                      email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                                      role=model.ROLE_ADMINISTRATOR)
        
        ## In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        ## In case the H5 version changed, run updates on all DataTypes
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(target=FilesUpdateManager().run_all_updates)
            thread.start()

        ## Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
Example #6
def initialize(skip_import=False, skip_updates=False):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)

    # Initialize DB
    is_db_empty = initialize_startup()

    # Create Projects storage root in case it does not exist.
    initialize_storage()

    # Populate DB algorithms, by introspection
    start_introspection_time = datetime.now()
    # Introspection is always done, even if DB was not empty.
    introspector = Introspector()
    introspector.introspect()

    to_invalidate = dao.get_non_validated_entities(start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)

    if not TvbProfile.is_first_run() and not skip_updates:
        # Create default users.
        if is_db_empty:
            dao.store_entity(
                User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, TvbProfile.current.web.admin.SYSTEM_USER_NAME, None,
                     None, True, None))
            UserService().create_user(username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                                      display_name=TvbProfile.current.web.admin.ADMINISTRATOR_DISPLAY_NAME,
                                      password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                                      email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                                      role=ROLE_ADMINISTRATOR, skip_import=skip_import)

        # In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        # In case the H5 version changed, run updates on all DataTypes
        thread = None
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(target=FilesUpdateManager().run_all_updates)
            thread.start()

        # Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
        return thread
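
Example #6 is the modern rewrite of this startup routine: introspection is consolidated into a single Introspector() call, XML event loading is gone, and the H5 update thread is returned so callers can decide whether to wait for the data migration. A hedged usage sketch, assuming initialize is importable from the module above:

# Hedged usage sketch: names assume the initialize() variant in Example #6.
update_thread = initialize(skip_import=True)
if update_thread is not None:
    # Block until all H5 DataType files have been migrated to the new version.
    update_thread.join()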
Example #7
    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        self.assertEqual(2, len(upload_operations), "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:                    
            self.assertFalse(operations[i].id in upload_ids, 
                             "The operation should not be an upload operation.")
Example #8
    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = Operation(self.test_user.id, project.id, upload_algo.id, "", status=STATUS_FINISHED)
        op3 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=STATUS_FINISHED)
        op5 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        assert 2 == len(upload_operations), "Wrong number of upload operations."
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            assert operations[i].id in upload_ids, \
                "The operation should be an upload operation."
        for i in [0, 1, 2]:
            assert not operations[i].id in upload_ids, \
                "The operation should not be an upload operation."
Example #9
    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")
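
Note what the three membership assertions establish: although the two operations reference different DataTypes (first_dt and second_dt), both belong to the same DataType group, so the service collapses them into a single input, the group itself (dt_group_id).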
Example #11
    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])
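
Here the inputs are standalone mapped arrays rather than members of a DataType group, so nothing collapses: the relevant filter drops only the hidden wrapper (2 of 3 inputs), while the full filter returns all three.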
Example #13
    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        assert not is_upload_operation, "The operation is not an upload operation."
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        assert is_upload_operation, "The operation is an upload operation."
Example #14
    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        self.assertFalse(is_upload_operation, "The operation is not an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        self.assertTrue(is_upload_operation, "The operation is an upload operation.")
Example #15
    def prepare_operations(self, user_id, project_id, algorithm, category, metadata,
                           visible=True, existing_dt_group=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. If a range of
        values is given, create an operation group and one operation for each
        possible combination in the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        operations = []

        available_args, group = self._prepare_group(project_id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        metadata, user_group = self._prepare_metadata(metadata, category, group, kwargs)

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = model.Operation(user_id, project_id, algorithm.id,
                                        json.dumps(one_set_of_args, cls=MapAsJson.MapAsJsonEncoder), meta_str,
                                        op_group_id=group_id, user_group=user_group, range_values=range_values)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = None
            if DataTypeMetaData.KEY_BURST in metadata:
                burst_id = metadata[DataTypeMetaData.KEY_BURST]
            if existing_dt_group is None:
                datatype_group = model.DataTypeGroup(group, operation_id=operations[0].id, fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group
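
The helper _prepare_group is not shown in this listing; conceptually it expands a range specification such as "range1[1..2]" into one argument set per point, which the loop above turns into individual Operations. A minimal sketch of that expansion (expand_ranges is a hypothetical helper, not TVB's implementation):

import itertools

def expand_ranges(base_args, ranges):
    """Yield (args, range_values) pairs, one per point of the cartesian product."""
    names = list(ranges)
    for point in itertools.product(*(ranges[name] for name in names)):
        range_values = dict(zip(names, point))
        # Each operation gets the base arguments plus its own range point.
        yield dict(base_args, **range_values), range_values

pairs = list(expand_ranges({"param_5": "1"}, {"range1": [1, 2]}))
assert len(pairs) == 2
assert pairs[0] == ({"param_5": "1", "range1": 1}, {"range1": 1})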
Example #17
    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id,
                              self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id,
                              upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(
            operations[0].gid)
        self.assertFalse(is_upload_operation,
                         "The operation is not an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(
            operations[1].gid)
        self.assertTrue(is_upload_operation,
                        "The operation is an upload operation.")
Example #18
    def prepare_operations(self, user_id, project, algorithm, category,
                           visible=True, existing_dt_group=None, view_model=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. If a range of
        values is given, create an operation group and one operation for each
        possible combination in the range.
        """
        operations = []

        available_args, group = self._prepare_group(project.id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        ga = self._prepare_metadata(category, kwargs, group)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = Operation(user_id, project.id, algorithm.id, json.dumps({'gid': view_model.gid.hex}),
                                  op_group_id=group_id, user_group=ga.operation_tag, range_values=range_values)
            operation.visible = visible
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            if existing_dt_group is None:
                datatype_group = DataTypeGroup(group, operation_id=operations[0].id, state=category.defaultdatastate)
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        for operation in operations:
            self._store_view_model(operation, project, view_model)

        return operations, group
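
Compared with Example #15, this newer variant no longer serializes raw kwargs into each Operation row: the stored parameter payload is just {'gid': view_model.gid.hex}, the full configuration is persisted separately through _store_view_model, the old metadata dict is replaced by a GenericAttributes object (ga) prepared from the category and kwargs, and the DataTypeGroup state comes from category.defaultdatastate instead of the metadata dict.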
Example #19
    def test_get_inputs_for_op_group(self, datatype_group_factory,
                                     test_adapter_factory):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        group = datatype_group_factory(project=self.test_project)
        datatypes = dao.get_datatypes_from_datatype_group(group.id)

        datatypes[0].visible = False
        dao.store_entity(datatypes[0])
        datatypes[1].visible = False
        dao.store_entity(datatypes[1])

        op_group = OperationGroup(self.test_project.id, "group",
                                  "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({
            "param_5": "1",
            "param_1": datatypes[0].gid,
            "param_6": "2"
        })
        params_2 = json.dumps({
            "param_5": "1",
            "param_4": datatypes[1].gid,
            "param_6": "5"
        })

        test_adapter_factory(adapter_class=TestAdapter3)
        algo = dao.get_algorithm_by_module(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = Operation(self.test_user.id,
                        self.test_project.id,
                        algo.id,
                        params_1,
                        op_group_id=op_group.id)
        op2 = Operation(self.test_user.id,
                        self.test_project.id,
                        algo.id,
                        params_2,
                        op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
        assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
        assert group.id == inputs[0].id, "Retrieved wrong dataType."

        datatypes[0].visible = True
        dao.store_entity(datatypes[0])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
        assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
        assert group.id == inputs[0].id, "Retrieved wrong dataType."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not datatypes[0].id == inputs[0].id, "Retrieved wrong dataType."
        assert not datatypes[1].id == inputs[0].id, "Retrieved wrong dataType."
        assert group.id == inputs[0].id, "Retrieved wrong dataType."
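
Example #19 is the pytest rewrite of Example #9: factory fixtures replace _create_datatype_group, dao.get_algorithm_by_module replaces the find_group / get_algorithm_by_group pair, and bare assert statements replace the unittest helpers.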