def test_usermanagement_post_valid(self):
     """
     Create a valid POST and check that the user changes are applied.
     """
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[common.KEY_USER] = self.test_user
     TestFactory.create_user(username="to_be_deleted")
     TestFactory.create_user(username="to_validate", validated=False)
     user_before_delete = dao.get_user_by_name("to_be_deleted")
     self.assertTrue(user_before_delete is not None)
     user_before_validation = dao.get_user_by_name("to_validate")
     self.assertFalse(user_before_validation.validated)
     data = {
         "delete_%i" % user_before_delete.id: True,
         "role_%i" % user_before_validation.id: "ADMINISTRATOR",
         "validate_%i" % user_before_validation.id: True
     }
     self.user_c.usermanagement(do_persist=True, **data)
     user_after_delete = dao.get_user_by_id(user_before_delete.id)
     self.assertTrue(user_after_delete is None, "User should be deleted.")
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     self.assertTrue(user_after_validation.validated,
                     "User should be validated now.")
     self.assertTrue(user_after_validation.role == "ADMINISTRATOR",
                     "Role has not changed.")
Example #2
 def edit_user(self, edited_user, old_password=None):
     """
     Retrieve a user by its id, then modify its role and validation status.
     """
     if edited_user.validated:
         self.validate_user(user_id=edited_user.id)
     user = dao.get_user_by_id(edited_user.id)
     user.role = edited_user.role
     user.validated = edited_user.validated
     if old_password is not None:
         if user.password == old_password:
             user.password = edited_user.password
         else:
             raise UsernameException("Invalid old password!")
     user.email = edited_user.email
     for key, value in six.iteritems(edited_user.preferences):
         user.preferences[key] = value
     dao.store_entity(user)
     if user.is_administrator():
         TvbProfile.current.manager.add_entries_to_config_file({
             SettingsService.KEY_ADMIN_EMAIL:
             user.email,
             SettingsService.KEY_ADMIN_PWD:
             user.password
         })
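A hedged usage sketch for edit_user, assuming a UserService instance (`user_service`) exposing the method above and the same `dao` session; the entity and variable names are illustrative:

    # Load the stored user, adjust its fields, then persist through the service.
    user = dao.get_user_by_id(some_user_id)            # some_user_id is a placeholder
    user.role = "ADMINISTRATOR"
    user.validated = True
    user.email = "admin@example.com"
    # Passing old_password enables the password-change branch; it must equal the
    # value currently stored on the entity, otherwise UsernameException is raised.
    user_service.edit_user(user, old_password=current_password_value)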
Example #3
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
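A hedged usage sketch, assuming `project_service` is an instance of the class defining the method above:

    # Fetch overlay details for a single (non-group) operation.
    details = project_service.get_operation_details(operation_gid, is_group=False)
    if details is None:
        # The gid did not resolve to a stored operation.
        raise ValueError("Operation %s not found" % operation_gid)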
Example #4
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = ProjectService._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
Example #5
 def save_project_to_user(user_id, project_id):
     """
     Mark for current user that the given project is the last one selected.
     """
     user = dao.get_user_by_id(user_id)
     user.selected_project = project_id
     dao.store_entity(user)
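A hedged usage sketch, assuming the function is exposed as a static helper on the user service and that `user` and `project` are entities already stored through `dao`:

    # Remember the last selected project for this user.
    UserService.save_project_to_user(user.id, project.id)
    assert dao.get_user_by_id(user.id).selected_project == project.id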
Example #6
 def validate_user(self, name='', user_id=None):
     """
     Service layer for editing a user and validating the account.
     """
     try:
         if user_id:
             user = dao.get_user_by_id(user_id)
         else:
             user = dao.get_user_by_name(name)
         if user is None or user.validated:
             self.logger.debug("UserName not found or already validated:" +
                               name)
             return False
         user.validated = True
         user = dao.store_entity(user)
         self.logger.debug("Sending validation email for userName="******" to address=" + user.email)
         email_sender.send(
             FROM_ADDRESS, user.email, SUBJECT_VALIDATE, "Hello " + name +
             TEXT_VALIDATED + TvbProfile.current.web.BASE_URL + "user/")
         self.logger.info("User:"******" was validated successfully" +
                          " and notification email sent!")
         return True
     except Exception as excep:
         self.logger.warning('Could not validate user:' + name + 'WARNING : ' + str(excep))
         return False
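A hedged usage sketch; `user_service` is an assumed instance of the service defining validate_user, and the return value follows the code above (True on success, False when the user is missing or already validated):

    if user_service.validate_user(name="to_validate"):
        print("Account validated and notification email sent")
    else:
        print("User not found or already validated")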
Example #7
 def save_project_to_user(user_id, project_id):
     """
     Mark for current user that the given project is the last one selected.
     """
     user = dao.get_user_by_id(user_id)
     user.selected_project = project_id
     dao.store_entity(user)
 def prepare_next_step(self, last_executed_op_id):
     """
     If the operation with id 'last_executed_op_id' resulted after
     the execution of a workflow step then this method will launch
     the operation corresponding to the next step from the workflow.
     """
     try:
         current_step, next_workflow_step = self._get_data(last_executed_op_id)
         if next_workflow_step is not None:
             operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
             dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
             if len(dynamic_param_names) > 0:
                 op_params = json.loads(operation.parameters)
                 for param_name in dynamic_param_names:
                     dynamic_param = op_params[param_name]
                     former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                       dynamic_param[wf_cfg.STEP_INDEX_KEY])
                     if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType: 
                         datatypes = dao.get_results_for_operation(former_step.fk_operation)
                         op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                     else:
                         previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                         op_params[param_name] = json.loads(previous_operation.parameters)[
                             dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                 operation.parameters = json.dumps(op_params)
                 operation = dao.store_entity(operation)
             return operation.id
         else:
             if current_step is not None:
                 current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                 current_workflow.status = current_workflow.STATUS_FINISHED
                 dao.store_entity(current_workflow)
                 burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                 parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                 all_finished = True
                 for workflow in parallel_workflows:
                     if workflow.status == workflow.STATUS_STARTED:
                         all_finished = False
                 if all_finished:
                     self.mark_burst_finished(burst_entity, success=True)
                     disk_size = dao.get_burst_disk_size(burst_entity.id)  # Transform from kB to MB
                     if disk_size > 0:
                         user = dao.get_project_by_id(burst_entity.fk_project).administrator
                         user.used_disk_space = user.used_disk_space + disk_size
                         dao.store_entity(user)
             else:
                 operation = dao.get_operation_by_id(last_executed_op_id)
                 disk_size = dao.get_disk_size_for_operation(operation.id)  # Transform from kB to MB
                 if disk_size > 0:
                     user = dao.get_user_by_id(operation.fk_launched_by)
                     user.used_disk_space = user.used_disk_space + disk_size
                     dao.store_entity(user)
         return None
     except Exception, excep:
         self.logger.error(excep)
         self.logger.exception(excep)
         raise WorkflowInterStepsException(excep)
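The dynamic-parameter handling above assumes that each name listed in dynamic_workflow_param_names maps, inside operation.parameters, to a small dict pointing back at an earlier workflow step. A sketch of that assumed shape (key names follow wf_cfg as used above; the concrete values are illustrative):

     # Illustrative shape of one dynamic parameter read by prepare_next_step:
     dynamic_param = {
         wf_cfg.STEP_INDEX_KEY: 2,      # index of the workflow step that produced the value
         wf_cfg.DATATYPE_INDEX_KEY: 0,  # int -> take the gid of the 0-th result datatype
                                        # str -> copy that named parameter from the former operation
     }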
 def test_edit_entity_forget_commit(self):
     """
     Commit should be done automatically if, for some reason, you forget to do so after new/update/delete operations.
     """
     stored_user = TestFactory.create_user('username', 'password', 'mail', True, 'role')
     user_id = stored_user.id
     self._dao_change_user_forget_commit(user_id, 'new_name')
     edited_user = dao.get_user_by_id(user_id)
     self.assertEqual(edited_user.username, 'new_name',
                      "User should be edited but is not. Expected 'new_name' got %s" % edited_user.username)
 def test_profile_edit(self):
     """
     Simulate an edit of the email and check that the data is actually changed.
     """
     edited_data = {'email': u'*****@*****.**'}
     cherrypy.request.method = "POST"
     self.user_c.profile(save=True, **edited_data)
     user = dao.get_user_by_id(self.test_user.id)
     self.assertEqual('*****@*****.**', user.email)
 def test_profile_edit(self):
     """
     Simulate an edit of the email and check that the data is actually changed.
     """
     edited_data = {'email': u'*****@*****.**'}
     cherrypy.request.method = "POST"
     self.user_c.profile(save=True, **edited_data)
     user = dao.get_user_by_id(self.test_user.id)
     self.assertEqual('*****@*****.**', user.email)
 def test_edit_entity_forget_commit(self):
     """
     Commit should be done automatically if, for some reason, you forget to do so after new/update/delete operations.
     """
     stored_user = TestFactory.create_user('username', 'password', 'mail', True, 'role')
     user_id = stored_user.id
     self._dao_change_user_forget_commit(user_id, 'new_name')
     edited_user = dao.get_user_by_id(user_id)
     self.assertEqual(edited_user.username, 'new_name',
                      "User should be edited but it is not. Expected 'new_name' got %s" % edited_user.username)
 def test_usermanagement_post_valid(self):
     """
     Create a valid POST and check that the user changes are applied.
     """
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[common.KEY_USER] = self.test_user
     TestFactory.create_user(username="to_be_deleted")
     TestFactory.create_user(username="to_validate", validated=False)
     user_before_delete = dao.get_user_by_name("to_be_deleted")
     self.assertTrue(user_before_delete is not None)
     user_before_validation = dao.get_user_by_name("to_validate")
     self.assertFalse(user_before_validation.validated)
     data = {"delete_%i" % user_before_delete.id: True,
             "role_%i" % user_before_validation.id: "ADMINISTRATOR",
             "validate_%i" % user_before_validation.id: True}
     self.user_c.usermanagement(do_persist=True, **data)
     user_after_delete = dao.get_user_by_id(user_before_delete.id)
     self.assertTrue(user_after_delete is None, "User should be deleted.")
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     self.assertTrue(user_after_validation.validated, "User should be validated now.")
     self.assertTrue(user_after_validation.role == "ADMINISTRATOR", "Role has not changed.")
 def test_validate_valid(self):
     """
     Pass a valid user and test that it is actually validated.
     """
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[common.KEY_USER] = self.test_user
     TestFactory.create_user(username="to_validate", validated=False)
     user_before_validation = dao.get_user_by_name("to_validate")
     self.assertFalse(user_before_validation.validated)
     self._expect_redirect('/tvb', self.user_c.validate, user_before_validation.username)
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     self.assertTrue(user_after_validation.validated, "User should be validated.")
     self.assertTrue(cherrypy.session[common.KEY_MESSAGE_TYPE] == common.TYPE_INFO)
 def test_validate_valid(self):
     """
     Pass a valid user and test that it is actually validated.
     """
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[b_c.KEY_USER] = self.test_user
     TestFactory.create_user(username="to_validate", validated=False)
     user_before_validation = dao.get_user_by_name("to_validate")
     self.assertFalse(user_before_validation.validated)
     self._expect_redirect('/tvb', self.user_c.validate, user_before_validation.username)
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     self.assertTrue(user_after_validation.validated, "User should be validated.")
     self.assertTrue(cherrypy.session[b_c.KEY_MESSAGE_TYPE] == b_c.TYPE_INFO)
    def initiate_prelaunch(self, operation, adapter_instance, temp_files, **kwargs):
        """
        Public method.
        This should be the common point for calling an adapter method.
        """
        result_msg = ""
        try:
            unique_id = None
            if self.ATT_UID in kwargs:
                unique_id = kwargs[self.ATT_UID]
            if operation.method_name == ABCAdapter.LAUNCH_METHOD:
                filtered_kwargs = adapter_instance.prepare_ui_inputs(kwargs)
            else:
                filtered_kwargs = kwargs

            self.logger.debug("Launching operation " + str(operation.id) + "." +
                              operation.method_name + " with " + str(filtered_kwargs))
            operation = dao.get_operation_by_id(operation.id)   # Load Lazy fields

            params = dict()
            for k, value_ in filtered_kwargs.items():
                params[str(k)] = value_

            disk_space_per_user = cfg.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.get_user_by_id(operation.fk_launched_by).used_disk_space  # Transform from kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, unique_id, available_space, **params)
            operation = dao.get_operation_by_id(operation.id)
            ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
            operation.parameters = json.dumps(kwargs)
            operation.mark_complete(model.STATUS_FINISHED)
            if nr_datatypes > 0:
                #### Write operation meta-XML only if some results are returned
                self.file_helper.write_operation_metadata(operation)
            dao.store_entity(operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile, excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
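The quota check above reduces to simple arithmetic; a self-contained sketch with illustrative numbers (all three quantities must be expressed in the same unit for the subtraction to be meaningful):

    # Illustrative quota arithmetic mirroring the computation above.
    disk_space_per_user = 5000000      # configured per-user maximum (cfg.MAX_DISK_SPACE)
    pending_op_disk_space = 1200000    # space reserved by already-started operations
    user_disk_space = 2300000          # space already consumed by stored results
    available_space = disk_space_per_user - pending_op_disk_space - user_disk_space
    # available_space == 1500000, the budget handed to adapter_instance._prelaunch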
Example #17
    def initiate_prelaunch(self, operation, adapter_instance, temp_files, **kwargs):
        """
        Public method.
        This should be the common point for calling an adapter method.
        """
        result_msg = ""
        try:
            unique_id = None
            if self.ATT_UID in kwargs:
                unique_id = kwargs[self.ATT_UID]
            if operation.method_name == ABCAdapter.LAUNCH_METHOD:
                filtered_kwargs = adapter_instance.prepare_ui_inputs(kwargs)
            else:
                filtered_kwargs = kwargs

            self.logger.debug("Launching operation " + str(operation.id) + "." +
                              operation.method_name + " with " + str(filtered_kwargs))
            operation = dao.get_operation_by_id(operation.id)   # Load Lazy fields

            params = dict()
            for k, value_ in filtered_kwargs.items():
                params[str(k)] = value_

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.get_user_by_id(operation.fk_launched_by).used_disk_space  # Transform from kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, unique_id, available_space, **params)
            operation = dao.get_operation_by_id(operation.id)
            ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
            operation.parameters = json.dumps(kwargs)
            operation.mark_complete(model.STATUS_FINISHED)
            if nr_datatypes > 0:
                #### Write operation meta-XML only if some results are returned
                self.file_helper.write_operation_metadata(operation)
            dao.store_entity(operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile, excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
Example #18
 def edit_user(self, edited_user, old_password=None):
     """
     Retrieve a user by its id, then modify its role and validation status.
     """
     if edited_user.validated:
         self.validate_user(user_id=edited_user.id)
     user = dao.get_user_by_id(edited_user.id)
     user.role = edited_user.role
     user.validated = edited_user.validated
     if old_password is not None:
         if user.password == old_password:
             user.password = edited_user.password
         else:
             raise UsernameException("Invalid old password!")
     user.email = edited_user.email
     for key, value in edited_user.preferences.iteritems():
         user.preferences[key] = value
     dao.store_entity(user)
     if user.is_administrator():
         TvbProfile.current.manager.add_entries_to_config_file({SettingsService.KEY_ADMIN_EMAIL: user.email,
                                                                SettingsService.KEY_ADMIN_PWD: user.password})
Example #19
 def validate_user(self, name='', user_id=None):
     """
     Service layer for editing a user and validating the account.
     """
     try:
         if user_id:
             user = dao.get_user_by_id(user_id)
         else:
             user = dao.get_user_by_name(name)
         if user is None or user.validated:
             self.logger.debug("UserName not found or already validated:" + name)
             return False
         user.validated = True
         user = dao.store_entity(user)
         self.logger.debug("Sending validation email for userName="******" to address=" + user.email)
         email_sender.send(FROM_ADDRESS, user.email, SUBJECT_VALIDATE,
                           "Hello " + name + TEXT_VALIDATED + TvbProfile.current.web.BASE_URL + "user/")
         self.logger.info("User:"******" was validated successfully" + " and notification email sent!")
         return True
     except Exception, excep:
         self.logger.warning('Could not validate user:' + name + 'WARNING : ' + str(excep))
         return False
Example #20
    def __import_pearson_coefficients_datatype(self, fc_folder, patient,
                                               user_tag, ts_gid):
        path = os.path.join(fc_folder, self.FC_MAT_FILE)
        result = ABCUploader.read_matlab_data(path, self.FC_DATASET_NAME)
        result = result.reshape((result.shape[0], result.shape[1], 1, 1))

        project = dao.get_project_by_id(self.current_project_id)
        user = dao.get_user_by_id(project.fk_admin)

        pearson_gid = uuid.uuid4()
        h5_path = "CorrelationCoefficients_{}.h5".format(pearson_gid.hex)
        operation_folder = self.storage_interface.get_project_folder(
            project.name, str(self.operation_id))
        h5_path = os.path.join(operation_folder, h5_path)

        generic_attributes = GenericAttributes()
        generic_attributes.user_tag_1 = user_tag
        generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

        with CorrelationCoefficientsH5(h5_path) as pearson_correlation_h5:
            pearson_correlation_h5.array_data.store(result)
            pearson_correlation_h5.gid.store(pearson_gid)
            pearson_correlation_h5.source.store(ts_gid)
            pearson_correlation_h5.labels_ordering.store(
                CorrelationCoefficients.labels_ordering.default)
            pearson_correlation_h5.subject.store(patient)
            pearson_correlation_h5.store_generic_attributes(generic_attributes)

        pearson_correlation_index = CorrelationCoefficientsIndex()
        pearson_correlation_index.gid = pearson_gid.hex
        pearson_correlation_index.fk_from_operation = self.operation_id
        pearson_correlation_index.subject = patient
        pearson_correlation_index.state = DEFAULTDATASTATE_RAW_DATA
        pearson_correlation_index.ndim = 4
        pearson_correlation_index.fk_source_gid = ts_gid.hex  # we need a random gid here to store the index
        pearson_correlation_index.has_valid_time_series = False
        dao.store_entity(pearson_correlation_index)
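The importer above ties the HDF5 file and the database index together through one gid; a self-contained sketch of that naming convention (the file name pattern follows the code above, everything else is illustrative):

    import uuid

    pearson_gid = uuid.uuid4()
    h5_name = "CorrelationCoefficients_{}.h5".format(pearson_gid.hex)
    # The H5 file stores the gid via pearson_correlation_h5.gid.store(pearson_gid),
    # and the index row stores the same value as pearson_correlation_index.gid = pearson_gid.hex.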
Example #21
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
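The paging above is plain integer arithmetic; a self-contained worked sketch (the page size is an assumed value for illustration):

    OPERATIONS_PAGE_SIZE = 20   # assumed page size for this example
    total_filtered = 45
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    # pages_no == 3
    current_page = 2
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    # start_idx == 20, so operations 20..39 are fetched for page 2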
Example #22
    def build(existing_op_id):
        op = dao.get_operation_by_id(existing_op_id)
        project = dao.get_project_by_id(op.fk_launched_in)
        user = dao.get_user_by_id(op.fk_launched_by)

        return operation_factory(test_user=user, test_project=project), project.id
Example #23
 def prepare_next_step(self, last_executed_op_id):
     """
     If the operation with id 'last_executed_op_id' resulted after
     the execution of a workflow step then this method will launch
     the operation corresponding to the next step from the workflow.
     """
     try:
         current_step, next_workflow_step = self._get_data(
             last_executed_op_id)
         if next_workflow_step is not None:
             operation = dao.get_operation_by_id(
                 next_workflow_step.fk_operation)
             dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
             if len(dynamic_param_names) > 0:
                 op_params = json.loads(operation.parameters)
                 for param_name in dynamic_param_names:
                     dynamic_param = op_params[param_name]
                     former_step = dao.get_workflow_step_by_step_index(
                         next_workflow_step.fk_workflow,
                         dynamic_param[wf_cfg.STEP_INDEX_KEY])
                     if type(dynamic_param[
                             wf_cfg.DATATYPE_INDEX_KEY]) is IntType:
                         datatypes = dao.get_results_for_operation(
                             former_step.fk_operation)
                         op_params[param_name] = datatypes[dynamic_param[
                             wf_cfg.DATATYPE_INDEX_KEY]].gid
                     else:
                         previous_operation = dao.get_operation_by_id(
                             former_step.fk_operation)
                         op_params[param_name] = json.loads(
                             previous_operation.parameters)[dynamic_param[
                                 wf_cfg.DATATYPE_INDEX_KEY]]
                 operation.parameters = json.dumps(op_params)
                 operation = dao.store_entity(operation)
             return operation.id
         else:
             if current_step is not None:
                 current_workflow = dao.get_workflow_by_id(
                     current_step.fk_workflow)
                 current_workflow.status = current_workflow.STATUS_FINISHED
                 dao.store_entity(current_workflow)
                 burst_entity = dao.get_burst_by_id(
                     current_workflow.fk_burst)
                 parallel_workflows = dao.get_workflows_for_burst(
                     burst_entity.id)
                 all_finished = True
                 for workflow in parallel_workflows:
                     if workflow.status == workflow.STATUS_STARTED:
                         all_finished = False
                 if all_finished:
                     self.mark_burst_finished(burst_entity, success=True)
                     disk_size = dao.get_burst_disk_size(
                         burst_entity.id)  #Transform from kB to MB
                     if disk_size > 0:
                         user = dao.get_project_by_id(
                             burst_entity.fk_project).administrator
                         user.used_disk_space = user.used_disk_space + disk_size
                         dao.store_entity(user)
             else:
                 operation = dao.get_operation_by_id(last_executed_op_id)
                 disk_size = dao.get_disk_size_for_operation(
                     operation.id)  #Transform from kB to MB
                 if disk_size > 0:
                     user = dao.get_user_by_id(operation.fk_launched_by)
                     user.used_disk_space = user.used_disk_space + disk_size
                     dao.store_entity(user)
         return None
     except Exception, excep:
         self.logger.error(excep)
         self.logger.exception(excep)
         raise WorkflowInterStepsException(excep)
Example #24
 def get_user_by_id(user_id):
     """
     Retrieves a user by its id.
     """
     return dao.get_user_by_id(user_id)
Example #25
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid

                        ## Filter only viewers for current DataTypeGroup entity:
                        launcher = self.retrieve_launchers(datatype_group.gid,
                                                           include_categories=[view_categ_id]).values()[0]
                        view_groups = []
                        for launcher in launcher.values():
                            url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                            if launcher['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                      str(launcher['category']) + '/' + str(launcher['id'])
                            view_groups.append(dict(name=launcher["displayName"],
                                                    url=url,
                                                    param_name=launcher['children'][0]['param_name'],
                                                    part_of_group=launcher['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[10]
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
Example #26
 def get_user_by_id(user_id):
     """
     Retrieves a user by its id.
     """
     return dao.get_user_by_id(user_id)
Example #27
    def retrieve_project_full(self,
                              project_id,
                              applied_filters=None,
                              current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(
            project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
            1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                                  start_idx,
                                                  OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                operation_group_id = one_op[3]
                if operation_group_id is not None and operation_group_id:
                    try:
                        operation_group = dao.get_generic_entity(
                            OperationGroup, operation_group_id)[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            operation_group_id)
                        result[
                            "datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:

                        if datatype_group is None:
                            view_groups = None
                        else:
                            view_groups = AlgorithmService(
                            ).get_visualizers_for_group(datatype_group.gid)
                        result["view_groups"] = view_groups
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                if not result['group']:
                    result['results'] = dao.get_results_for_operation(
                        result['id'])
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)

        if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE:
            end_idx = OPERATIONS_PAGE_SIZE
        else:
            end_idx = total_filtered - start_idx

        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx)
        started_ops = 0
        if current_ops is None:
            return selected_project, [], 0
        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        datatype = dao.get_datatype_by_id(datatype_group.id)
                        result["datatype_group_gid"] = datatype.gid
                        result["gid"] = operation_group.gid
                        
                        all_categs = dao.get_algorithm_categories()
                        view_categ = dao.get_visualisers_categories()[0]
                        excludes = [categ.id for categ in all_categs if categ.id != view_categ.id]
                        algo = self.retrieve_launchers("DataTypeGroup", datatype.gid,
                                                       exclude_categories=excludes).values()[0]

                        view_groups = []
                        for algo in algo.values():
                            url = '/flow/' + str(algo['category']) + '/' + str(algo['id'])
                            if algo['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \
                                      str(algo['category']) + '/' + str(algo['id'])
                            view_groups.append(dict(name=algo["displayName"],
                                                    url=url,
                                                    param_name=algo['children'][0]['param_name'],
                                                    part_of_group=algo['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception, excep:
                        self.logger.error(excep)
                        self.logger.warning("Will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = timedelta2string(result["complete"] - result["start"])
                result["status"] = one_op[10]
                if result["status"] == model.STATUS_STARTED:
                    started_ops += 1
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name,
                                                                                 figure.operation.id)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser 
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)