def reset_password(self, **data):
        """
        Service Layer for resetting a password.
        """
        if (KEY_USERNAME not in data) or len(data[KEY_USERNAME]) < 1:
            raise UsernameException("Empty UserName!")
        if (KEY_EMAIL not in data) or len(data[KEY_EMAIL]) < 1:
            raise UsernameException("Empty Email!")

        old_pass, user = None, None
        try:
            user_name = data[KEY_USERNAME]
            email = data[KEY_EMAIL]
            user = dao.get_user_by_name_email(user_name, email)
            if user is None:
                raise UsernameException("Given credentials don't match!")

            old_pass = user.password
            new_pass = ''.join(chr(randint(48, 122)) for _ in range(DEFAULT_PASS_LENGTH))
            user.password = md5(new_pass).hexdigest()
            self.edit_user(user, old_pass)
            self.logger.info("Setting new password for user " + user_name + " !")
            email_sender.send(FROM_ADDRESS, email, SUBJECT_RECOVERY, TEXT_RECOVERY % (new_pass,))
            return TEXT_DISPLAY
        except Exception as excep:
            if old_pass and len(old_pass) > 1 and user:
                user.password = old_pass
                dao.store_entity(user)
            self.logger.error("Could not change user password!")
            self.logger.exception(excep)
            raise UsernameException(excep.message)
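
The password reset above boils down to generating a short random string and storing its MD5 digest. A minimal standalone sketch of that step, outside the TVB service layer (the DEFAULT_PASS_LENGTH value of 10 is an assumption):

from hashlib import md5
from random import randint

DEFAULT_PASS_LENGTH = 10  # assumed value; the real constant is defined elsewhere in TVB

def generate_password(length=DEFAULT_PASS_LENGTH):
    # chr(randint(48, 122)) draws from '0'..'z', so punctuation characters can appear too
    return ''.join(chr(randint(48, 122)) for _ in range(length))

new_pass = generate_password()
hashed = md5(new_pass.encode('utf-8')).hexdigest()  # what would be stored as user.password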
Example #2
 def test_get_users_for_project(self):
     """
     Get all members of a project except the current user.
     """
     user_1 = model.User("test_user1", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user_1)
     user_2 = model.User("test_user2", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user_2)
     user_3 = model.User("test_user3", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user_3)
     user_4 = model.User("test_user4", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user_4)
     user_5 = model.User("test_user5", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user_5)
     admin = dao.get_user_by_name("test_user1")
     member1 = dao.get_user_by_name("test_user2")
     member2 = dao.get_user_by_name("test_user5")
     data = dict(name="test_proj", description="test_desc", users=[member1.id, member2.id])
     project = ProjectService().store_project(admin, True, None, **data)
     all_users, members, pag = self.user_service.get_users_for_project(admin.username, project.id)
     assert len(members) == 2, "More members than there should be."
     assert len(all_users) == 5, "Admin should not be viewed as member. " \
                                 "Neither should users that were not part of the project's users list."
     assert pag == 1, "Invalid total pages number."
     for user in all_users:
         assert user.username != admin.username, "Admin is in members!"
 def test_remove_project_node(self):
     """
     Test removing of a node from a project.
     """
     inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
     project_to_link = model.Project("Link", self.test_user.id, "descript")
     project_to_link = dao.store_entity(project_to_link)
     exact_data = dao.get_datatype_by_gid(gid)
     dao.store_entity(model.Links(exact_data.id, project_to_link.id))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
     
     operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
     op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
     self.assertTrue(os.path.exists(op_folder))
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     ### Validate that no more files are created than needed.
     
     self.project_service._remove_project_node_files(inserted_project.id, gid)
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
     ### operation.xml file should still be there
     
     op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
     self.project_service._remove_project_node_files(project_to_link.id, gid)
     self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
Example #4
    def create_operation(algorithm=None, test_user=None, test_project=None, 
                         operation_status=model.STATUS_FINISHED, parameters="test params"):
        """
        Create persisted operation.
        
        :param algorithm: When None, a default algorithm (NDimensionArrayAdapter) is retrieved from the DB.
        :return: Operation entity after persistence. 
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                    'NDimensionArrayAdapter')

        if test_user is None:
            test_user = TestFactory.create_user()
            
        if test_project is None:
            test_project = TestFactory.create_project(test_user)
            
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters, meta=json.dumps(meta),
                                    status=operation_status)
        dao.store_entity(operation)
        ### Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
    def submit(self, dynamic_gid, dynamic_name):
        if dao.get_dynamic_by_name(dynamic_name):
            return {'saved': False, 'msg': 'There is another configuration with the same name'}

        dynamic = self.get_cached_dynamic(dynamic_gid)
        model = dynamic.model
        integrator = dynamic.integrator

        model_parameters = []

        for name in model.ui_configurable_parameters:
            value = getattr(model, name)[0]
            model_parameters.append((name, value))

        entity = tvb.core.entities.model.Dynamic(
            dynamic_name,
            common.get_logged_user().id,
            model.__class__.__name__,
            json.dumps(model_parameters),
            integrator.__class__.__name__,
            None
            # todo: serialize integrator parameters
            # json.dumps(integrator.raw_ui_integrator_parameters)
        )

        dao.store_entity(entity)
        return {'saved': True}
Example #6
    def reset_password(self, **data):
        """
        Service Layer for resetting a password.
        """
        if (KEY_EMAIL not in data) or len(data[KEY_EMAIL]) < 1:
            raise UsernameException("Empty Email!")

        old_pass, user = None, None
        try:
            email = data[KEY_EMAIL]
            name_hint = data[KEY_USERNAME]
            user = dao.get_user_by_email(email, name_hint)
            if user is None:
                raise UsernameException("No singular user could be found for the given data!")

            old_pass = user.password
            new_pass = ''.join(chr(randint(48, 122)) for _ in range(DEFAULT_PASS_LENGTH))
            user.password = md5(new_pass).hexdigest()
            self.edit_user(user, old_pass)
            self.logger.info("Resetting password for email : " + email)
            email_sender.send(FROM_ADDRESS, email, SUBJECT_RECOVERY, TEXT_RECOVERY % (user.username, new_pass))
            return TEXT_DISPLAY
        except Exception as excep:
            if old_pass and len(old_pass) > 1 and user:
                user.password = old_pass
                dao.store_entity(user)
            self.logger.exception("Could not change user password!")
            raise UsernameException(excep.message)
    def upgrade_file(self, input_file_name, datatype=None):
        """
        Upgrades the given file to the latest data version. The file will be upgraded
        sequentially, up until the current version from tvb.basic.config.settings.VersionSettings.DB_STRUCTURE_VERSION
        
        :param input_file_name: the path to the file which needs to be upgraded
        :return: True when an update was needed and ran successfully,
            False when the file is already up to date.

        """
        if self.is_file_up_to_date(input_file_name):
            # Avoid running the DB update of size, when H5 is not being changed, to speed-up
            return False

        file_version = self.get_file_data_version(input_file_name)
        self.log.info("Updating from version %s , file: %s " % (file_version, input_file_name))
        for script_name in self.get_update_scripts(file_version):
            self.run_update_script(script_name, input_file=input_file_name)

        if datatype:
            # Compute and update the disk_size attribute of the DataType in DB:
            datatype.disk_size = self.files_helper.compute_size_on_disk(input_file_name)
            dao.store_entity(datatype)

        return True
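
The upgrade is sequential: every update script between the file's stored version and the current code version is run in order. A small sketch of that idea with an illustrative script-naming scheme (the real discovery lives in get_update_scripts):

CURRENT_DATA_VERSION = 5  # illustrative value

def scripts_between(file_version, current_version=CURRENT_DATA_VERSION):
    # e.g. a file at version 2 needs 2->3, 3->4 and 4->5
    return ["%d_to_%d.py" % (v, v + 1) for v in range(file_version, current_version)]

assert scripts_between(2) == ['2_to_3.py', '3_to_4.py', '4_to_5.py']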
Example #8
 def test_validate_user_non_existent(self):
     """
     Flow for trying to validate a user that doesn't exist in the database.
     """
     user = model.User("test_user", "test_pass", "*****@*****.**", True, "user")
     dao.store_entity(user)
     assert not self.user_service.validate_user("test_user2"), "Validation done even though user is non-existent"
Example #9
    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        assert os.path.exists(op_folder)
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        ### Validate that no more files are created than needed.

        if dao.get_system_user() is None:
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
    def _run_cluster_job(operation_identifier, user_name_label, adapter_instance):
        """
        Threaded Popen
        It is the function called by the ClusterSchedulerClient in a Thread.
        This function starts a new process.
        """
        # Load operation so we can estimate the execution time
        operation = dao.get_operation_by_id(operation_identifier)
        kwargs = parse_json_parameters(operation.parameters)
        kwargs = adapter_instance.prepare_ui_inputs(kwargs)
        time_estimate = int(adapter_instance.get_execution_time_approximation(**kwargs))
        hours = int(time_estimate / 3600)
        minutes = (int(time_estimate) % 3600) / 60
        seconds = int(time_estimate) % 60
        # Anything lower than 5 hours just use default walltime
        if hours < 5:
            walltime = "05:00:00"
        else:
            if hours < 10:
                hours = "0%d" % hours
            else:
                hours = str(hours)
            walltime = "%s:%s:%s" % (hours, str(minutes), str(seconds))

        call_arg = TvbProfile.current.cluster.SCHEDULE_COMMAND % (operation_identifier, user_name_label, walltime)
        LOGGER.info(call_arg)
        process_ = Popen([call_arg], stdout=PIPE, shell=True)
        job_id = process_.stdout.read().replace('\n', '').split(TvbProfile.current.cluster.JOB_ID_STRING)[-1]
        LOGGER.debug("Got jobIdentifier = %s for CLUSTER operationID = %s" % (operation_identifier, job_id))
        operation_identifier = model.OperationProcessIdentifier(operation_identifier, job_id=job_id)
        dao.store_entity(operation_identifier)
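
The walltime handling above is a seconds-to-HH:MM:SS conversion with a floor of five hours. A standalone sketch of the same computation using divmod (zero-padding all fields, which the snippet above does only for hours):

def format_walltime(time_estimate_seconds, minimum="05:00:00"):
    hours, remainder = divmod(int(time_estimate_seconds), 3600)
    minutes, seconds = divmod(remainder, 60)
    if hours < 5:
        return minimum
    return "%02d:%02d:%02d" % (hours, minutes, seconds)

assert format_walltime(3 * 3600) == "05:00:00"
assert format_walltime(7 * 3600 + 125) == "07:02:05"
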
    def remove_datatype(self, skip_validation=False):
        """
        Called when a Connectivity is to be removed.
        """
        if not skip_validation:
            associated_ts = dao.get_generic_entity(TimeSeriesRegion, self.handled_datatype.gid, '_connectivity')
            associated_rm = dao.get_generic_entity(RegionMapping, self.handled_datatype.gid, '_connectivity')
            associated_stim = dao.get_generic_entity(StimuliRegion, self.handled_datatype.gid, '_connectivity')
            associated_mes = dao.get_generic_entity(ConnectivityMeasure, self.handled_datatype.gid, '_connectivity')
            msg = "Connectivity cannot be removed as it is used by at least one "

            if len(associated_ts) > 0:
                raise RemoveDataTypeException(msg + " TimeSeriesRegion.")
            if len(associated_rm) > 0:
                raise RemoveDataTypeException(msg + " RegionMapping.")
            if len(associated_stim) > 0:
                raise RemoveDataTypeException(msg + " StimuliRegion.")
            if len(associated_mes) > 0:
                raise RemoveDataTypeException(msg + " ConnectivityMeasure.")

        #### Update child Connectivities, if any.
        child_conns = dao.get_generic_entity(Connectivity, self.handled_datatype.gid, '_parent_connectivity')
        
        if len(child_conns) > 0:
            for one_conn in child_conns[1:]:
                one_conn.parent_connectivity = child_conns[0].gid
            if child_conns and child_conns[0]:
                child_conns[0].parent_connectivity = self.handled_datatype.parent_connectivity
            for one_child in child_conns:
                dao.store_entity(one_child)
        ABCRemover.remove_datatype(self, skip_validation)
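
The child-connectivity update above promotes the first child to be the new parent of its siblings and attaches it to the removed node's own parent. A plain-object sketch of that re-parenting step:

class Node(object):
    def __init__(self, gid, parent_gid=None):
        self.gid = gid
        self.parent_connectivity = parent_gid

removed = Node("A", parent_gid=None)
children = [Node("B", "A"), Node("C", "A"), Node("D", "A")]

for one_conn in children[1:]:
    one_conn.parent_connectivity = children[0].gid
children[0].parent_connectivity = removed.parent_connectivity

assert [c.parent_connectivity for c in children] == [None, "B", "B"]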
        
        
    def _run_cluster_job(operation_identifier, user_name_label, adapter_instance):
        """
        Threaded Popen
        It is the function called by the ClusterSchedulerClient in a Thread.
        This function starts a new process.
        """
        # Load operation so we can estimate the execution time
        operation = dao.get_operation_by_id(operation_identifier)
        kwargs = parse_json_parameters(operation.parameters)
        time_estimate = int(adapter_instance.get_execution_time_approximation(**kwargs))
        hours = int(time_estimate / 3600)
        minutes = (int(time_estimate) % 3600) / 60
        seconds = int(time_estimate) % 60
        # Anything lower than 2 hours just use default walltime
        if hours < 2:
            walltime = "02:00:00"
        elif hours > 23:
            walltime = "23:59:59"
        else:
            walltime = datetime.time(hours, minutes, seconds)
            walltime = walltime.strftime("%H:%M:%S")

        call_arg = config.CLUSTER_SCHEDULE_COMMAND % (walltime, operation_identifier, user_name_label)
        LOGGER.info(call_arg)
        process_ = Popen([call_arg], stdout=PIPE, shell=True)
        job_id = process_.stdout.read().replace('\n', '').split('OAR_JOB_ID=')[-1]
        LOGGER.debug("Got jobIdentifier = %s for CLUSTER operationID = %s" % (operation_identifier, job_id))
        operation_identifier = model.OperationProcessIdentifier(operation_identifier, job_id=job_id)
        dao.store_entity(operation_identifier)
Example #13
 def generate_users(nr_users, nr_projects):
     """
      The generate_users method will populate a clean-state DB with generated users and their projects.
     :param nr_users: number of users to be generated (with random roles between
                             CLINICIAN and RESEARCHER and random validated state)
     :param nr_projects: maximum number of projects to be generated for each user
     """
     users = []
     
     for i in range(nr_users):
         coin_flip = random.randint(0, 1)
         role = 'CLINICIAN' if coin_flip == 1 else 'RESEARCHER'
         password = md5("test").hexdigest()
         new_user = model.User("gen" + str(i), password, "*****@*****.**", True, role)
         dao.store_entity(new_user)
         new_user = dao.get_user_by_name("gen" + str(i))
         ExtremeTestFactory.VALIDATION_DICT[new_user.id] = 0
         users.append(new_user)
         
     for i in range(nr_users):
         current_user = dao.get_user_by_name("gen" + str(i))
         projects_for_user = random.randint(0, nr_projects)
         for j in range(projects_for_user):         
             data = dict(name='GeneratedProject' + str(i) + '_' + str(j),
                         description='test_desc',
                         users=ExtremeTestFactory.get_users_ids(random.randint(0, nr_users - 3),
                                                                nr_users, current_user.id, users))
             ProjectService().store_project(current_user, True, None, **data)
             ExtremeTestFactory.VALIDATION_DICT[current_user.id] += 1 
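
The role assignment above is a coin flip between the two roles; random.choice expresses the same thing more directly (shown only as an equivalent sketch):

import random

role = 'CLINICIAN' if random.randint(0, 1) == 1 else 'RESEARCHER'
# equivalent:
role = random.choice(['CLINICIAN', 'RESEARCHER'])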
Example #14
def update():
    """
    Move images previously stored in TVB operation folders, in a single folder/project.
    """
    projects_count = dao.get_all_projects(is_count=True)

    for page_start in range(0, projects_count, PAGE_SIZE):

        projects_page = dao.get_all_projects(page_start=page_start,
                                             page_end=min(page_start + PAGE_SIZE, projects_count))

        for project in projects_page:
            figures = _figures_in_project(project.id)

            for figure in figures:
                figure.file_path = "%s-%s" % (figure.operation.id, figure.file_path)

            dao.store_entities(figures)

            project_path = FilesHelper().get_project_folder(project)
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()

            project.version = TvbProfile.current.version.PROJECT_VERSION
            dao.store_entity(project)
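
The project loop above pages through the table in PAGE_SIZE chunks, clamping the final page with min(). The pattern in isolation:

PAGE_SIZE = 20
projects_count = 47  # illustrative total

pages = [(start, min(start + PAGE_SIZE, projects_count))
         for start in range(0, projects_count, PAGE_SIZE)]
assert pages == [(0, 20), (20, 40), (40, 47)]
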
    def stop_operation(operation_id):
        """
        Stop a thread for a given operation id
        """
        operation = dao.get_operation_by_id(operation_id)
        if not operation or operation.status != model.STATUS_STARTED:
            LOGGER.warning("Operation already stopped or not found is given to stop job: %s" % operation_id)
            return True

        LOGGER.debug("Stopping operation: %s" % str(operation_id))

        ## Set the thread stop flag to true
        for thread in CURRENT_ACTIVE_THREADS:
            if int(thread.operation_id) == operation_id:
                thread.stop()
                LOGGER.debug("Found running thread for operation: %d" % operation_id)

        ## Kill Thread
        stopped = True
        operation_process = dao.get_operation_process_for_operation(operation_id)
        if operation_process is not None:
            ## Now try to kill the operation if it exists
            stopped = OperationExecutor.stop_pid(operation_process.pid)
            if not stopped:
                LOGGER.debug("Operation %d was probably killed from it's specific thread." % operation_id)
            else:
                LOGGER.debug("Stopped OperationExecutor process for %d" % operation_id)

        ## Mark operation as canceled in DB.
        operation.mark_cancelled()
        dao.store_entity(operation)
        return stopped
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started,
     )
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(
         NoMemoryAvailableException,
         self.operation_service.initiate_operation,
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         **data
     )
     self._assert_no_dt2()
Example #17
 def save_project_to_user(user_id, project_id):
     """
     Mark for current user that the given project is the last one selected.
     """
     user = dao.get_user_by_id(user_id)
     user.selected_project = project_id
     dao.store_entity(user)
 def remove_datatype(self, skip_validation = False):
     """
     Called when a TimeSeries is removed.
     """
     associated_cv = dao.get_generic_entity(Covariance, self.handled_datatype.gid, '_source')
     associated_pca = dao.get_generic_entity(PrincipalComponents, self.handled_datatype.gid, '_source')
     associated_is = dao.get_generic_entity(IndependentComponents, self.handled_datatype.gid, '_source')
     associated_cc = dao.get_generic_entity(CrossCorrelation, self.handled_datatype.gid, '_source')
     associated_fr = dao.get_generic_entity(FourierSpectrum, self.handled_datatype.gid, '_source')
     associated_wv = dao.get_generic_entity(WaveletCoefficients, self.handled_datatype.gid, '_source')
     associated_cs = dao.get_generic_entity(CoherenceSpectrum, self.handled_datatype.gid, '_source')
     associated_dm = dao.get_generic_entity(DatatypeMeasure, self.handled_datatype.gid, '_analyzed_datatype')
     for datatype_measure in associated_dm:
          datatype_measure._analyzed_datatype = None
         dao.store_entity(datatype_measure)
     msg = "TimeSeries cannot be removed as it is used by at least one "
     if not skip_validation:
         if len(associated_cv) > 0:
             raise RemoveDataTypeException(msg + " Covariance.")
         if len(associated_pca) > 0:
             raise RemoveDataTypeException(msg + " PrincipalComponents.")
         if len(associated_is) > 0:
             raise RemoveDataTypeException(msg + " IndependentComponents.")
         if len(associated_cc) > 0:
             raise RemoveDataTypeException(msg + " CrossCorrelation.")
         if len(associated_fr) > 0:
             raise RemoveDataTypeException(msg + " FourierSpectrum.")
         if len(associated_wv) > 0:
             raise RemoveDataTypeException(msg + " WaveletCoefficients.")
         if len(associated_cs) > 0:
             raise RemoveDataTypeException(msg + " CoherenceSpectrum.")
     ABCRemover.remove_datatype(self, skip_validation)
Example #19
 def add_operation_additional_info(self, message):
     """
     Adds additional info on the operation to be displayed in the UI. Usually a warning message.
     """
     current_op = dao.get_operation_by_id(self.operation_id)
     current_op.additional_info = message
     dao.store_entity(current_op)
Example #20
 def test_validate_user_validated(self):
     """
     Flow for trying to validate a user that was already validated.
     """
     user = model.User("test_user", "test_pass", "*****@*****.**", True, "user")
     dao.store_entity(user)
     assert not self.user_service.validate_user("test_user"), "Validation invalid."
Example #21
    def __upgrade_datatype_list(self, datatypes):
        """
        Upgrade a list of DataTypes to the current version.
        
        :param datatypes: The list of DataTypes that should be upgraded.

        :returns: (nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault) a two-tuple of integers representing
            the number of DataTypes for which the upgrade worked fine, and the number of DataTypes for which
            some kind of fault occurred
        """
        nr_of_dts_upgraded_fine = 0
        nr_of_dts_upgraded_fault = 0
        for datatype in datatypes:
            specific_datatype = dao.get_datatype_by_gid(datatype.gid)
            if isinstance(specific_datatype, MappedType):
                try:
                    self.upgrade_file(specific_datatype.get_storage_file_path())
                    nr_of_dts_upgraded_fine += 1
                except (MissingDataFileException, FileVersioningException) as ex:
                    # The file is missing for some reason. Just mark the DataType as invalid.
                    datatype.invalid = True
                    dao.store_entity(datatype)
                    nr_of_dts_upgraded_fault += 1
                    self.log.exception(ex)
        return nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault
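
Stripped of the DAO calls, the bookkeeping in __upgrade_datatype_list is a try/except counter over the items being upgraded. A reduced sketch of that pattern:

def upgrade_all(items, upgrade_one):
    upgraded_fine, upgraded_fault = 0, 0
    for item in items:
        try:
            upgrade_one(item)
            upgraded_fine += 1
        except Exception:
            # in the real code the DataType is also marked invalid here
            upgraded_fault += 1
    return upgraded_fine, upgraded_fault

assert upgrade_all([1, 2, 3], lambda x: 1.0 / (x - 2)) == (2, 1)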
Example #22
 def test_validate_user_happy_flow(self):
     """
     Standard flow for a validate user action.
     """
     user = model.User("test_user", "test_pass", "*****@*****.**", False, "user")
     dao.store_entity(user)
     assert self.user_service.validate_user("test_user"), "Validation failed when it shouldn't have."
Example #23
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with this column having only the default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
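
The back-fill of no_of_ranges above checks range3, then range2, then range1. Assuming ranges are always filled from range1 upwards (which the elif chain implies), it is equivalent to counting the non-None range fields:

def count_ranges(range1, range2, range3):
    return sum(value is not None for value in (range1, range2, range3))

assert count_ranges("[0..1]", "[0..2]", None) == 2
assert count_ranges(None, None, None) == 0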
Example #24
    def clean_database(self, delete_folders=True):
        """
        Deletes data from all tables
        """
        self.cancel_all_operations()
        LOGGER.warning("Your Database content will be deleted.")
        try:
            session = SessionMaker()
            for table in reversed(model.Base.metadata.sorted_tables):
                # We don't delete data from some tables, because those are 
                # imported only during introspection which is done one time
                if table.name not in self.EXCLUDE_TABLES:
                    try:
                        session.open_session()
                        con = session.connection()
                        LOGGER.debug("Executing Delete From Table " + table.name)
                        con.execute(table.delete())
                        session.commit()
                    except Exception as e:
                        # We catch the exception here, in case some table does not exist,
                        # to allow the others to be deleted
                        LOGGER.warning(e)
                        session.rollback()
                    finally:
                        session.close_session()
            LOGGER.info("Database was cleanup!")
        except Exception as excep:
            LOGGER.warning(excep)
            raise

        # Now if the database is clean we can delete also project folders on disk
        if delete_folders:
            self.delete_project_folders()
        dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
Example #25
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
def update_dt(dt_id, new_create_date):
    dt = dao.get_datatype_by_id(dt_id)
    dt.create_date = new_create_date
    dao.store_entity(dt)
    # Update MetaData in H5 as well.
    dt = dao.get_datatype_by_gid(dt.gid)
    dt.persist_full_metadata()
Example #27
 def create_link(data_ids, project_id):
     """
     For a list of dataType IDs and a project id create all the required links.
     """
     for data in data_ids:
         link = model.Links(data, project_id)
         dao.store_entity(link)
Example #28
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with this column having only the default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
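
The raw SQL above rewrites the legacy operation status values into the newer prefixed ones; the same mapping written out as a dict, for reference:

STATUS_MAP = {
    'FINISHED': '4-FINISHED',
    'STARTED': '3-STARTED',
    'CANCELED': '2-CANCELED',
    'ERROR': '1-ERROR',  # the CASE falls through to '1-ERROR' for this one
}
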
 def __init_algorithmn(self):
     """
     Insert some starting data in the database.
     """
     categ1 = model.AlgorithmCategory('one', True)
     self.categ1 = dao.store_entity(categ1)
     ad = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ1.id)
     self.algo_inst = dao.store_entity(ad)
Example #30
 def rename_burst(burst_id, new_name):
     """
      Rename the burst given by burst_id, setting its name to new_name.
     """
     burst = dao.get_burst_by_id(burst_id)
     burst.name = new_name
     dao.store_entity(burst)
Example #31
    def test_prepare_name(self):
        """
        Test prepare burst name
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)
        simulation_tuple = self.burst_service.prepare_simulation_name(
            stored_burst, self.test_project.id)
        assert simulation_tuple[0] == 'simulation_' + str(dao.get_number_of_bursts(self.test_project.id) + 1), \
            "The default simulation name is not the defined one"

        burst_test_name = "Burst Test Name"
        stored_burst.name = burst_test_name
        stored_burst = dao.store_entity(stored_burst)
        simulation_tuple = self.burst_service.prepare_simulation_name(
            stored_burst, self.test_project.id)
        assert simulation_tuple[0] == burst_test_name, "The burst name is not the given one"
    def test_adapter_launch(self, connectivity_factory, region_mapping_factory,
                            time_series_region_index_factory):
        """
        Test that the adapters launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algo = FlowService().get_algorithm_by_module_and_class(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)
        self.operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            algo.id,
            json.dumps(''),
            meta=json.dumps(meta),
            status=model_operation.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)

        # Get connectivity, region_mapping and a dummy time_series_region
        connectivity = connectivity_factory()
        region_mapping = region_mapping_factory()
        dummy_time_series_index = time_series_region_index_factory(
            connectivity=connectivity, region_mapping=region_mapping)

        dummy_time_series_index.start_time = 0.0
        dummy_time_series_index.sample_period = 1.0

        dummy_time_series_index = dao.get_generic_entity(
            dummy_time_series_index.__class__, dummy_time_series_index.gid, 'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        form = TimeseriesMetricsAdapterForm()
        view_model = form.get_view_model()()
        view_model.time_series = UUID(dummy_time_series_index.gid)
        form.fill_trait(view_model)
        ts_metric_adapter.submit_form(form)
        resulted_metric = ts_metric_adapter.launch(view_model)
        assert isinstance(
            resulted_metric,
            DatatypeMeasureIndex), "Result should be a datatype measure."
        assert len(resulted_metric.metrics) >= len(list(ts_metric_adapter.get_form().algorithms.choices)), \
            "At least a result should have been generated for every metric."
        for metric_value in json.loads(resulted_metric.metrics).values():
            assert isinstance(metric_value, (float, int))
Example #33
    def _import_bursts(project_entity, bursts_dict):
        """
        Re-create old bursts, but keep a mapping between the id it has here and the old-id it had
        in the project where they were exported, so we can re-add the datatypes to them.
        """
        burst_ids_mapping = {}

        for old_burst_id in bursts_dict:
            burst_information = BurstInformation.load_from_dict(
                bursts_dict[old_burst_id])
            burst_entity = model.BurstConfiguration(project_entity.id)
            burst_entity.from_dict(burst_information.data)
            burst_entity = dao.store_entity(burst_entity)
            burst_ids_mapping[int(old_burst_id)] = burst_entity.id
            # We don't need the data in dictionary form anymore, so update it with new BurstInformation object
            bursts_dict[old_burst_id] = burst_information
        return burst_ids_mapping
Example #34
    def prepare_operation(self, user_id, project_id, algorithm, view_model_gid,
                          op_group=None, ranges=None, visible=True):

        op_group_id = None
        if op_group:
            op_group_id = op_group.id
        if isinstance(view_model_gid, uuid.UUID):
            view_model_gid = view_model_gid.hex

        operation = Operation(user_id, project_id, algorithm.id, json.dumps({'gid': view_model_gid}),
                              op_group_id=op_group_id, range_values=ranges)
        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project_id) +
                          ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(op_group_id) + ")")

        operation.visible = visible
        operation = dao.store_entity(operation)
        return operation
Example #35
 def persist_operation_state(self, operation, operation_status, message=None):
     """
      Update Operation instance state. Store it in DB and on HDD.
     :param operation: Operation instance
     :param operation_status: new status
     :param message: message in case of error
     :return: operation instance changed
     """
     operation.mark_complete(operation_status, message)
     operation.queue_full = False
     operation = dao.store_entity(operation)
     # update burst also
     burst_config = self.get_burst_for_operation_id(operation.id)
     if burst_config is not None:
         burst_status = STATUS_FOR_OPERATION.get(operation_status)
         self.mark_burst_finished(burst_config, burst_status, message)
     return operation
Example #36
    def test_read_write_arrays(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        after a column from a class specific table (e.g. DATA_arraywrapper).
        """
        test_array = numpy.array(range(16))
        shapes = [test_array.shape, (2, 8), (2, 2, 4), (2, 2, 2, 2)]
        storage_path = self.flow_service.file_helper.get_project_folder(
            self.operation.project, str(self.operation.id))
        for i in range(4):
            datatype_inst = MappedArray(title="dim_" + str(i + 1),
                                        d_type="MappedArray",
                                        storage_path=storage_path,
                                        module="tvb.datatypes.arrays",
                                        subject="John Doe",
                                        state="RAW",
                                        operation_id=self.operation.id)
            datatype_inst.array_data = test_array.reshape(shapes[i])
            result = dao.store_entity(datatype_inst)
            result.array_data = None

        inserted_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 4, "Found " + str(len(inserted_data))

        for i in range(4):
            ## inserted_data will be retrieved in the opposite order than the insert order
            actual_datatype = dao.get_generic_entity(MappedArray,
                                                     inserted_data[3 - i][2],
                                                     'gid')[0]
            assert actual_datatype.length_1d == shapes[i][0]
            if i > 0:
                assert actual_datatype.length_2d == shapes[i][1]
            expected_arr = test_array.reshape(shapes[i])
            assert numpy.equal(actual_datatype.array_data, expected_arr).all(),\
                            str(i + 1) + "D Data not read correctly"
            actual_datatype.array_data = None
            ### Check that meta-data are also written for Array attributes.
            metadata = actual_datatype.get_metadata('array_data')
            assert actual_datatype.METADATA_ARRAY_MAX in metadata
            assert metadata[actual_datatype.METADATA_ARRAY_MAX] == 15
            assert actual_datatype.METADATA_ARRAY_MIN in metadata
            assert metadata[actual_datatype.METADATA_ARRAY_MIN] == 0
            assert actual_datatype.METADATA_ARRAY_MEAN in metadata
            assert metadata[actual_datatype.METADATA_ARRAY_MEAN] == 7.5
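
The metadata assertions at the end of test_read_write_arrays follow directly from the test data: numpy.array(range(16)) has min 0, max 15 and mean 7.5 regardless of how it is reshaped. A quick check:

import numpy

test_array = numpy.array(range(16))
for shape in [(16,), (2, 8), (2, 2, 4), (2, 2, 2, 2)]:
    reshaped = test_array.reshape(shape)
    assert reshaped.min() == 0 and reshaped.max() == 15 and reshaped.mean() == 7.5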
Example #37
 def create_value_wrapper(test_user, test_project=None):
     """
     Creates a ValueWrapper dataType, and the associated parent Operation.
     This is also used in ProjectStructureTest.
     """
     if test_project is None:
         test_project = TestFactory.create_project(test_user, 'test_proj')
     operation = TestFactory.create_operation(test_user=test_user,
                                              test_project=test_project)
     value_wrapper = ValueWrapper(data_value="5.0",
                                  data_name="my_value",
                                  data_type="float")
     op_dir = FilesHelper().get_project_folder(test_project,
                                               str(operation.id))
     vw_idx = h5.store_complete(value_wrapper, op_dir)
     vw_idx.fk_from_operation = operation.id
     vw_idx = dao.store_entity(vw_idx)
     return test_project, vw_idx.gid, operation
Example #38
    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.files_helper.read_project_metadata(project_path)

        project_entity = manager_of_class(model.Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = ("Could not import project: %s with gid: %s. There is already a "
                         "project with the same name or gid.") % (project_entity.name, project_entity.gid)
            raise ProjectImportException(error_msg)
Example #39
    def build(data=None, op=None):
        ts = time_series_factory(data)

        if op is None:
            op = operation_factory()

        ts_db = TimeSeriesIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)

        ts_db = dao.store_entity(ts_db)
        return ts_db
Example #40
 def store_datatype(self, datatype):
     """This method stores data type into DB"""
     try:
         self.logger.debug("Store datatype: %s with Gid: %s" %
                           (datatype.__class__.__name__, datatype.gid))
         return dao.store_entity(datatype)
     except MissingDataSetException:
         self.logger.error(
             "Datatype %s has missing data and could not be imported properly."
             % (datatype, ))
         os.remove(datatype.get_storage_file_path())
     except IntegrityError as excep:
         self.logger.exception(excep)
         error_msg = "Could not import data with gid: %s. There is already a one with " \
                     "the same name or gid." % datatype.gid
         # Delete file if can't be imported
         os.remove(datatype.get_storage_file_path())
         raise ProjectImportException(error_msg)
Example #41
    def build(row1=None, row2=None, project=None, operation=None, subject=None, state=None):
        data_type = DummyDataType()
        data_type.row1 = row1
        data_type.row2 = row2

        if operation is None:
            operation = operation_factory(test_project=project)

        data_type_index = DummyDataTypeIndex(subject=subject, state=state)
        data_type_index.fk_from_operation = operation.id
        data_type_index.fill_from_has_traits(data_type)

        data_type_h5_path = h5.path_for_stored_index(data_type_index)
        with DummyDataTypeH5(data_type_h5_path) as f:
            f.store(data_type)

        data_type_index = dao.store_entity(data_type_index)
        return data_type_index
Example #42
    def _remove_project_node_files(self, project_id, gid, links, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, a StructureException will be raised.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)

            if links:
                op = dao.get_operation_by_id(datatype.fk_from_operation)
                # Instead of deleting, we copy the datatype to the linked project
                # We also clone the operation
                new_operation = self.__copy_linked_datatype_before_delete(op, datatype, project,
                                                                          links[0].fk_to_project)

                # If there is a datatype group and operation group and they were not moved yet to the linked project,
                # then do it
                if datatype.fk_datatype_group is not None:
                    dt_group_op = dao.get_operation_by_id(datatype.fk_from_operation)
                    op_group = dao.get_operationgroup_by_id(dt_group_op.fk_operation_group)
                    op_group.fk_launched_in = links[0].fk_to_project
                    dao.store_entity(op_group)

                    burst = dao.get_burst_for_operation_id(op.id)
                    if burst is not None:
                        burst.fk_project = links[0].fk_to_project
                        dao.store_entity(burst)

                    dt_group = dao.get_datatypegroup_by_op_group_id(op_group.id)
                    dt_group.parent_operation = new_operation
                    dt_group.fk_from_operation = new_operation.id
                    dao.store_entity(dt_group)

            else:
                # There is no link for this datatype so it has to be deleted
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)

                # Remove burst if dt has one and it still exists
                if datatype.fk_parent_burst is not None and datatype.is_ts:
                    burst = dao.get_burst_for_operation_id(datatype.fk_from_operation)

                    if burst is not None:
                        dao.remove_entity(BurstConfiguration, burst.id)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")
Example #43
    def __init__(self):
        now = datetime.now()
        micro_postfix = "_%d" % now.microsecond

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix,
                    description='test_desc',
                    users=[])
        self.project = project_service.store_project(self.user, True, None,
                                                     **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1",
                                         alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id,
                                    'id',
                                    name='',
                                    req_data='',
                                    param_name='',
                                    output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {
            DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
            DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE
        }
        operation = model.Operation(self.user.id,
                                    self.project.id,
                                    self.algorithm.id,
                                    'test parameters',
                                    meta=json.dumps(self.meta),
                                    status="FINISHED",
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
Example #44
    def build():
        time_series_index = time_series_index_factory()
        time_series = h5.load_from_index(time_series_index)
        data = numpy.random.random((10, 10, 10, 10, 10))
        cross_correlation = temporal_correlations.CrossCorrelation(source=time_series, array_data=data)

        op = operation_factory()

        cross_correlation_index = CrossCorrelationIndex()
        cross_correlation_index.fk_from_operation = op.id
        cross_correlation_index.fill_from_has_traits(cross_correlation)

        cross_correlation_h5_path = h5.path_for_stored_index(cross_correlation_index)
        with CrossCorrelationH5(cross_correlation_h5_path) as f:
            f.store(cross_correlation)

        cross_correlation_index = dao.store_entity(cross_correlation_index)
        return cross_correlation_index
Example #45
    def _prepare_group(self, project_id, existing_dt_group, kwargs):
        """
        Create and store OperationGroup entity, or return None
        """
        # Standard ranges as accepted from UI
        range1_values = self.get_range_values(kwargs, self._range_name(1))
        range2_values = self.get_range_values(kwargs, self._range_name(2))
        available_args = self.__expand_arguments([(kwargs, None)],
                                                 range1_values,
                                                 self._range_name(1))
        available_args = self.__expand_arguments(available_args, range2_values,
                                                 self._range_name(2))
        is_group = False
        ranges = []
        if self._range_name(1) in kwargs and range1_values is not None:
            is_group = True
            ranges.append(
                json.dumps((kwargs[self._range_name(1)], range1_values)))
        if self._range_name(2) in kwargs and range2_values is not None:
            is_group = True
            ranges.append(
                json.dumps((kwargs[self._range_name(2)], range2_values)))
        # Now for additional ranges which might be the case for the 'model exploration'
        last_range_idx = 3
        ranger_name = self._range_name(last_range_idx)
        while ranger_name in kwargs:
            values_for_range = self.get_range_values(kwargs, ranger_name)
            available_args = self.__expand_arguments(available_args,
                                                     values_for_range,
                                                     ranger_name)
            last_range_idx += 1
            ranger_name = self._range_name(last_range_idx)
        if last_range_idx > 3:
            ranges = []  # Since we only have 3 fields in db for this just hide it
        if not is_group:
            group = None
        elif existing_dt_group is None:
            group = OperationGroup(project_id=project_id, ranges=ranges)
            group = dao.store_entity(group)
        else:
            group = existing_dt_group.parent_operation_group

        return available_args, group
    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")
Example #47
    def prepare_operations_for_workflowsteps(self, workflow_step_list, workflows, user_id, burst_id,
                                             project_id, group, sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        A total of len(workflows) x len(workflow_step_list) Operations will be generated.
        For every step in workflow_step_list one OperationGroup and one DataTypeGroup will be created 
        (in case of PSE).
        """

        for step in workflow_step_list:
            operation_group = None
            if (group is not None) and not isinstance(step, model.WorkflowStepView):
                operation_group = model.OperationGroup(project_id=project_id, ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, model.WorkflowStepView):
                    ## Create an Operation only for non-visualization steps; WorkflowStepView steps do not need one.
                    metadata, user_group = self._prepare_metadata(metadata, algo_category, operation_group, op_params)
                    operation = model.Operation(user_id, project_id, step.fk_algorithm,
                                                json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder),
                                                meta=json.dumps(metadata),
                                                op_group_id=group_id, range_values=range_values, user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            if operation_group is not None and operation is not None:
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation.id,
                                                     fk_parent_burst=burst_id,
                                                     state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
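
# A minimal, self-contained sketch of the fan-out performed above: every step is
# cloned once per workflow, so len(workflows) * len(workflow_step_list) step copies
# are stored, and an Operation is created only for the non-visualization steps.
# (Step below is a hypothetical stand-in, not a TVB class.)
from dataclasses import dataclass, replace

@dataclass
class Step:
    name: str
    is_view: bool = False
    fk_workflow: int = 0
    fk_operation: int = 0

def fan_out(steps, workflow_ids):
    stored_steps, operation_ids = [], []
    for step in steps:
        for wf_id in workflow_ids:
            cloned = replace(step, fk_workflow=wf_id)
            if not cloned.is_view:
                operation_ids.append(len(operation_ids) + 1)   # stand-in for dao.store_entity
                cloned = replace(cloned, fk_operation=operation_ids[-1])
            stored_steps.append(cloned)
    return stored_steps, operation_ids

copies, ops = fan_out([Step("simulate"), Step("plot", is_view=True)], workflow_ids=[1, 2, 3])
assert len(copies) == 6 and len(ops) == 3
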
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of operations.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")
Example #49
    @staticmethod
    def _populate_algorithm_categories(algo_category):
        algo_category_instance = dao.filter_category(algo_category.category_name, algo_category.rawinput,
                                                     algo_category.display, algo_category.launchable,
                                                     algo_category.order_nr)

        if algo_category_instance is not None:
            algo_category_instance.last_introspection_check = datetime.datetime.now()
            algo_category_instance.removed = False
        else:
            algo_category_instance = AlgorithmCategory(algo_category.category_name, algo_category.launchable,
                                                       algo_category.rawinput, algo_category.display,
                                                       algo_category.defaultdatastate, algo_category.order_nr,
                                                       datetime.datetime.now())
        algo_category_instance = dao.store_entity(algo_category_instance)

        return algo_category_instance.id
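
# The helper above is an update-or-insert on AlgorithmCategory: look the row up by
# its natural key, refresh it if found, otherwise build a fresh entity, then store
# either way and return the id. A standalone sketch of the same pattern against an
# in-memory store (hypothetical names, not the TVB dao API):
import datetime

_categories = {}

def upsert_category(name, launchable, order_nr):
    existing = _categories.get(name)
    if existing is not None:
        existing["last_introspection_check"] = datetime.datetime.now()
        existing["removed"] = False
        entity = existing
    else:
        entity = {"name": name, "launchable": launchable, "order_nr": order_nr,
                  "last_introspection_check": datetime.datetime.now(), "removed": False}
    _categories[name] = entity      # plays the role of dao.store_entity
    return name                     # the real helper returns the stored entity's id

upsert_category("Analyze", True, 1)
upsert_category("Analyze", True, 1)   # the second call updates instead of duplicating
assert len(_categories) == 1
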
Example #50
    def _populate_algorithms(self, algo_category_class, algo_category_id):
        for adapter_class in self.introspection_registry.ADAPTERS[
                algo_category_class]:
            try:
                if not adapter_class.can_be_active():
                    self.logger.warning("Skipped Adapter(probably because MATLAB not found):" + str(adapter_class))
                    # skip adapters that cannot be active, as the log message states
                    continue

                stored_adapter = Algorithm(
                    adapter_class.__module__, adapter_class.__name__,
                    algo_category_id, adapter_class.get_group_name(),
                    adapter_class.get_group_description(),
                    adapter_class.get_ui_name(),
                    adapter_class.get_ui_description(),
                    adapter_class.get_ui_subsection(), datetime.datetime.now())
                adapter_inst = adapter_class()

                adapter_form = adapter_inst.get_form()
                required_datatype = adapter_form.get_required_datatype()
                if required_datatype is not None:
                    required_datatype = required_datatype.__name__
                filters = adapter_form.get_filters()
                if filters is not None:
                    filters = filters.to_json()

                stored_adapter.required_datatype = required_datatype
                stored_adapter.datatype_filter = filters
                stored_adapter.parameter_name = adapter_form.get_input_name()
                stored_adapter.outputlist = str(adapter_inst.get_output())

                inst_from_db = dao.get_algorithm_by_module(
                    adapter_class.__module__, adapter_class.__name__)
                if inst_from_db is not None:
                    stored_adapter.id = inst_from_db.id

                stored_adapter = dao.store_entity(stored_adapter, inst_from_db is not None)
                adapter_class.stored_adapter = stored_adapter

            except Exception:
                self.logger.exception("Could not introspect Adapters file:" +
                                      adapter_class.__module__)
Example #51
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                                  MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        view_model = get_class_by_name("{}.{}".format(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        self._store_view_model(stored_metric_operation, sim_operation.project,
                               view_model)
        return stored_metric_operation
Example #52
 def _create_operation(self, project_id, algorithm_id):
     """
     Create a dummy operation.
     :param project_id: the project in which the operation is created
     :param algorithm_id: the algorithm to be run for the operation
     :return: a dummy `Operation` with the given specifications
     """
     algorithm = dao.get_algorithm_by_id(algorithm_id)
     meta = {
         DataTypeMetaData.KEY_SUBJECT: "John Doe",
         DataTypeMetaData.KEY_STATE: "RAW_DATA"
     }
     operation = Operation(self.test_user.id,
                           project_id,
                           algorithm.id,
                           'test params',
                           meta=json.dumps(meta),
                           status=STATUS_FINISHED)
     return dao.store_entity(operation)
Example #53
    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id,
                              self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id,
                              project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id,
                              upload_algo.id, "")
        op4 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id,
                              self.test_project.id,
                              upload_algo.id,
                              "",
                              status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        self.assertEqual(2, len(upload_operations),
                         "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:
            self.assertFalse(
                operations[i].id in upload_ids,
                "The operation should not be an upload operation.")
Example #54
    def build(connectivity, region_mapping, ts=None, test_user=None, test_project=None, op=None):
        if ts is None:
            ts = time_series_region_factory(connectivity, region_mapping)

        if not op:
            op = operation_factory(test_user=test_user, test_project=test_project)

        ts_db = TimeSeriesRegionIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesRegionH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)

        ts_db = dao.store_entity(ts_db)
        return ts_db
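
# `build` above is the inner callable of a pytest "factory as fixture": the fixture
# returns build, which itself closes over other factory fixtures such as
# operation_factory and time_series_region_factory. A self-contained sketch of that
# pattern with plain dicts (hypothetical names, not the TVB fixtures):
import pytest

@pytest.fixture
def operation_factory():
    def build(op_id=1):
        return {"id": op_id}
    return build

@pytest.fixture
def index_factory(operation_factory):
    def build(op=None):
        op = op if op is not None else operation_factory()
        return {"fk_from_operation": op["id"]}
    return build

def test_factory_fixture(index_factory):
    index = index_factory()
    assert index["fk_from_operation"] == 1
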
Example #55
    def build():

        time_series_index = time_series_index_factory()
        time_series = h5.load_from_index(time_series_index)
        data = numpy.random.random((10, 10))
        covariance = graph.Covariance(source=time_series, array_data=data)

        op = operation_factory()

        covariance_index = CovarianceIndex()
        covariance_index.fk_from_operation = op.id
        covariance_index.fill_from_has_traits(covariance)

        covariance_h5_path = h5.path_for_stored_index(covariance_index)
        with CovarianceH5(covariance_h5_path) as f:
            f.store(covariance)

        covariance_index = dao.store_entity(covariance_index)
        return covariance_index
Example #56
    def test_load_burst_history(self):
        burst_config1 = BurstConfiguration(self.test_project.id)
        burst_config2 = BurstConfiguration(self.test_project.id)
        burst_config3 = BurstConfiguration(self.test_project.id)

        dao.store_entity(burst_config1)
        dao.store_entity(burst_config2)
        dao.store_entity(burst_config3)

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_BURST_CONFIG, burst_config1)
            burst_parameters = self.simulator_controller.load_burst_history()

        assert len(burst_parameters['burst_list']) == 3, "The burst configurations were not stored."
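
# The test above patches cherrypy.session with create=True because that attribute
# only exists once CherryPy's session tool is active; create=True lets mock add the
# missing attribute for the duration of the with-block and remove it afterwards.
# A standalone illustration of that mechanism against a throw-away module
# (fake_cherrypy is a hypothetical name, no CherryPy needed):
import sys
import types
from unittest.mock import patch

fake = types.ModuleType("fake_cherrypy")
sys.modules["fake_cherrypy"] = fake            # make the module importable for patch()

with patch("fake_cherrypy.session", {"burst_config": "cfg1"}, create=True):
    import fake_cherrypy
    assert fake_cherrypy.session["burst_config"] == "cfg1"

assert not hasattr(fake, "session")            # the attribute is gone again after the patch
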
    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op2 = Operation(None,
                        self.test_user.id,
                        project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op3 = Operation(None, self.test_user.id, self.test_project.id,
                        upload_algo.id)
        op4 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op5 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        assert 2 == len(
            upload_operations), "Wrong number of upload operations."
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            assert operations[i].id in upload_ids, \
                "The operation should be an upload operation."
        for i in [0, 1, 2]:
            assert not operations[i].id in upload_ids, \
                "The operation should not be an upload operation."
Example #58
    def test_get_members_pages(self):
        """
        Create many users (more than one page of members).
        Create a project and assign all users as members.
        Test that 2 pages of project members are retrieved.
        Then remove the extra users, so that only one page of members remains for the project.
        """
        user_ids = []
        for i in range(MEMBERS_PAGE_SIZE + 3):
            user = model_project.User("test_user_no" + str(i),
                                      "test_user_no" + str(i), "pass",
                                      "*****@*****.**")
            user = dao.store_entity(user)
            user_ids.append(user.id)

        admin = dao.get_user_by_name("test_user_no1")
        data = dict(name='test_proj', description='test_desc', users=user_ids)
        project = ProjectService().store_project(admin, True, None, **data)

        page_users, all_users, pag = self.user_service.get_users_for_project(
            admin.username, project.id, 2)
        assert len(page_users) == (MEMBERS_PAGE_SIZE + 3) % MEMBERS_PAGE_SIZE
        assert len(
            all_users) == MEMBERS_PAGE_SIZE + 3, 'Not all members returned'
        assert pag == 2, 'Invalid page number returned'

        for i in range(3):
            user = dao.get_user_by_name("test_user_no" + str(i + 2))
            self.user_service.delete_user(user.id)

        page_users, all_users, pag = self.user_service.get_users_for_project(
            "test_user_no1", project.id, 2)
        assert len(page_users) == 0, 'Paging not working properly'
        assert len(all_users) == MEMBERS_PAGE_SIZE, 'Not all members returned'
        assert pag == 1, 'Invalid page number returned'

        page_users, all_users, pag = self.user_service.get_users_for_project(
            "test_user_no1", project.id, 1)
        assert len(
            page_users) == MEMBERS_PAGE_SIZE, 'Paging not working properly'
        assert len(all_users) == MEMBERS_PAGE_SIZE, 'Not all members returned'
        assert pag == 1, 'Invalid page number returned'
Example #59
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)
        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        view_model = TimeseriesMetricsAdapterModel()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        OperationService._store_view_model(stored_metric_operation,
                                           sim_operation.project, view_model)
        return stored_metric_operation
Example #60
 def test_get_available_bursts_happy(self):
     """
     Test that all the correct burst are returned for the given project.
     """
     project = Project("second_test_proj", self.test_user.id, "description")
     second_project = dao.store_entity(project)
     test_project_bursts = [TestFactory.store_burst(self.test_project.id).id for _ in range(4)]
     second_project_bursts = [TestFactory.store_burst(second_project.id).id for _ in range(3)]
     returned_test_project_bursts = [burst.id for burst in
                                     self.burst_service.get_available_bursts(self.test_project.id)]
     returned_second_project_bursts = [burst.id for burst in
                                       self.burst_service.get_available_bursts(second_project.id)]
     assert len(test_project_bursts) == len(returned_test_project_bursts), \
         "Incorrect bursts retrieved for project %s." % self.test_project
     assert len(second_project_bursts) == len(returned_second_project_bursts), \
         "Incorrect bursts retrieved for project %s." % second_project
     assert set(second_project_bursts) == set(returned_second_project_bursts), \
         "Incorrect bursts retrieved for project %s." % second_project
     assert set(test_project_bursts) == set(returned_test_project_bursts), \
         "Incorrect bursts retrieved for project %s." % self.test_project