Example #1
 def test_count_datatypes_in_group(self):
     """ Test that counting dataTypes is correct. Happy flow."""
     _, dt_group_id, first_dt, _ = self._create_datatype_group()
     count = dao.count_datatypes_in_group(dt_group_id)
     assert count == 2
     count = dao.count_datatypes_in_group(first_dt.id)
     assert count == 0, "There should be no dataType."
Example #2
 def test_count_datatypes_in_group(self):
     """ Test that counting dataTypes is correct. Happy flow."""
     _, dt_group_id, first_dt, _ = self._create_datatype_group()
     count = dao.count_datatypes_in_group(dt_group_id)
     self.assertEqual(count, 2)
     count = dao.count_datatypes_in_group(first_dt.id)
     self.assertEqual(count, 0, "There should be no dataType.")
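Both versions exercise the same contract: counting by a group id returns the number of datatypes in that group, while counting by the id of a plain datatype (which is not a group) returns 0. A minimal sketch of what such a DAO query could look like, assuming a SQLAlchemy session and a DataType model with an fk_datatype_group column (names are illustrative, not confirmed by these examples):

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class DataType(Base):
        __tablename__ = "DATA_TYPES"
        id = Column(Integer, primary_key=True)
        # Members of a datatype group point back to the group's row via this FK.
        fk_datatype_group = Column(Integer, ForeignKey("DATA_TYPES.id"), nullable=True)

    def count_datatypes_in_group(session, datatype_group_id):
        # A non-group id matches no fk_datatype_group values, hence the count of 0 above.
        return session.query(DataType).filter(
            DataType.fk_datatype_group == datatype_group_id).count()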
Example #3
 def test_count_datatypes_in_group(self, datatype_group_factory):
     """ Test that counting dataTypes is correct. Happy flow."""
     group = datatype_group_factory()
     count = dao.count_datatypes_in_group(group.id)
     assert count == 9
     datatypes = dao.get_datatypes_from_datatype_group(group.id)
     count = dao.count_datatypes_in_group(datatypes[0].id)
     assert count == 0, "There should be no dataType."
Example #4
    def test_tvb_export_for_datatype_group_with_links(self,
                                                      datatype_group_factory):
        """
        This method checks export of a data type group with Links
        """
        ts_datatype_group, dm_datatype_group = datatype_group_factory(
            project=self.test_project,
            store_vm=True,
            use_time_series_region=True)
        file_name, file_path, _ = self.export_manager.export_data(
            ts_datatype_group, self.TVB_LINKED_EXPORTER, self.test_project)

        assert file_name is not None, "Export process should return a file name"
        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(
            file_path), "Could not find export file: %s on disk." % file_path

        # Now check if the generated file is a correct ZIP file
        assert zipfile.is_zipfile(
            file_path), "Generated file is not a valid ZIP file"

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            list_of_folders = []
            links_folder_found = False
            for file in list_of_files:
                dir_name = os.path.dirname(file)
                if not links_folder_found and "Links" in dir_name:
                    links_folder_found = True

                if dir_name not in list_of_folders:
                    list_of_folders.append(dir_name)

            assert links_folder_found, "Links folder was not exported"

            count_datatypes = dao.count_datatypes_in_group(
                ts_datatype_group.id)
            count_datatypes += dao.count_datatypes_in_group(
                dm_datatype_group.id)

            # Check if the ZIP file contains files for data types and view models (multiple H5 files in case of a Sim)
            # +1 For Links folder
            assert count_datatypes + 1 == len(list_of_folders)
            # +3 for the 3 files in Links folder: Connectivity, Surface, Region Mapping
            # time series have 6 files, datatype measures have 2 files
            assert (count_datatypes // 2) * 6 + (count_datatypes // 2) * 2 + 3 == len(list_of_files)
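A quick check of that arithmetic, assuming the factory yields 9 time series plus 9 datatype measures (so count_datatypes == 18, in line with the 9-member group of Example #3): 18 + 1 = 19 folders, and (18 / 2) * 6 + (18 / 2) * 2 + 3 = 54 + 18 + 3 = 75 files.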
Example #5
    def test_tvb_export_for_datatype_group(self):
        """
        This method checks export of a data type group
        """
        datatype_group = self.datatypeFactory.create_datatype_group()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None,
                        "Export process should return a file name")
        self.assertTrue(file_path is not None,
                        "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(file_path),
                        "Generated file is not a valid ZIP file")

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if the ZIP file contains files for data types + operation
            self.assertEqual(
                count_datatypes * 2, len(list_of_files),
                "Should have 2 x nr datatypes files, one for operations one for datatypes"
            )
Example #6
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameters that are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
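A hypothetical call site, assuming a ProjectService-like instance exposing this method (project_service and operation_gid are assumed names, not from the examples):

    # Fetch overlay details for a single (non-group) operation.
    details = project_service.get_operation_details(operation_gid, is_group=False)
    if details is None:
        # The method returns None when the operation GID cannot be resolved.
        raise ValueError("Operation %s was not found." % operation_gid)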
Example #7
    def test_tvb_export_for_datatype_group(self, datatype_group_factory):
        """
        This method checks export of a data type group
        """
        datatype_group = datatype_group_factory(project=self.test_project,
                                                store_vm=True)
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.test_project)

        assert file_name is not None, "Export process should return a file name"
        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(
            file_path), "Could not find export file: %s on disk." % file_path

        # Now check if the generated file is a correct ZIP file
        assert zipfile.is_zipfile(
            file_path), "Generated file is not a valid ZIP file"

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            list_of_folders = []
            for file in list_of_files:
                dir_name = os.path.dirname(file)
                if dir_name not in list_of_folders:
                    list_of_folders.append(dir_name)

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if the ZIP file contains files for data types and view models (multiple H5 files in case of a Sim)
            assert count_datatypes == len(list_of_folders)
            assert count_datatypes * 6 == len(list_of_files)
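With the 9-member group created by the factory from Example #3, this expects 9 folders and 9 * 6 = 54 files: six H5 files per simulation result, presumably the datatype itself plus its view model and auxiliary H5 files, as the comment suggests.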
Example #8
    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = ProjectService._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameters that are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
Example #9
 def _update_dt_groups(self, project_id):
     dt_groups = dao.get_datatypegroup_for_project(project_id)
     for dt_group in dt_groups:
         dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
         dts_in_group = dao.get_datatypes_from_datatype_group(dt_group.id)
         if dts_in_group:
             dt_group.fk_parent_burst = dts_in_group[0].fk_parent_burst
         dao.store_entity(dt_group)
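get_datatypes_from_datatype_group, used here and in Example #3, is presumably the fetching counterpart of the counting query sketched after Example #2; under the same illustrative assumptions:

    def get_datatypes_from_datatype_group(session, datatype_group_id):
        # Same filter as the count, but returning the member rows themselves.
        return session.query(DataType).filter(
            DataType.fk_datatype_group == datatype_group_id).all()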
Example #10
    def mark_burst_finished(self,
                            burst_entity,
                            burst_status=None,
                            error_message=None,
                            store_h5_file=True):
        """
        Mark Burst status field.
        Also compute 'weight' for current burst: no of operations inside, estimate time on disk...

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and perpetuate the message.
        """
        if burst_status is None:
            burst_status = BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = BurstConfiguration.BURST_ERROR

        try:
            # If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(DataTypeGroup,
                                                     burst_entity.gid,
                                                     "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(
                    dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(
                    dt_group.id)
                dao.store_entity(dt_group)

            # Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(
                burst_entity.gid)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            if store_h5_file:
                self.store_burst_configuration(burst_entity)
        except Exception:
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            if store_h5_file:
                self.store_burst_configuration(burst_entity)
Example #11
 def mark_burst_finished(self, burst_entity, error=False, success=False, cancel=False, error_message=None):
     """
     Mark Burst status field.
     Also compute 'weight' for current burst: no of operations inside, estimate time on disk...
     
     :param burst_entity: BurstConfiguration to be updated, at finish time.
     :param error: When True, burst will be marked as finished with error.
     :param success: When True, burst will be marked successfully.
     :param cancel: When True, burst will be marked as user-canceled.
     """
     try:
         linked_ops_number = dao.get_operations_in_burst(burst_entity.id, is_count=True)
         linked_datatypes = dao.get_generic_entity(model.DataType, burst_entity.id, "fk_parent_burst")
         
         disk_size = linked_ops_number   # 1KB for each dataType, considered for operation.xml files
         dt_group_sizes = dict()
         for dtype in linked_datatypes:
             if dtype.disk_size is not None:
                 disk_size = disk_size + dtype.disk_size
                 ### Prepare and compute DataTypeGroup sizes, in case of ranges.
                 if dtype.fk_datatype_group:
                     previous_group_size = dt_group_sizes.get(dtype.fk_datatype_group, 0)
                     dt_group_sizes[dtype.fk_datatype_group] = previous_group_size + dtype.disk_size
                          
         ### If there are any DataType Groups in current Burst, update their counter.
         burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
         if len(burst_dt_groups) > 0:
             for dt_group in burst_dt_groups:
                 dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                 dt_group.disk_size = dt_group_sizes[dt_group.id] if (dt_group.id in dt_group_sizes) else 0
                 dao.store_entity(dt_group)
                 
         ### Update actual Burst entity fields    
         burst_entity.disk_size = disk_size          # In KB
         burst_entity.datatypes_number = len(linked_datatypes) 
         burst_entity.workflows_number = len(dao.get_workflows_for_burst(burst_entity.id))  
         burst_entity.mark_status(success=success, error=error, cancel=cancel)
         burst_entity.error_message = error_message
         
         dao.store_entity(burst_entity)
     except Exception as excep:
         self.logger.error(excep)
         self.logger.exception("Could not correctly update Burst status and meta-data!")
         burst_entity.mark_status(error=True)
         burst_entity.error_message = "Error when updating Burst Status"
         dao.store_entity(burst_entity)
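The per-group size accumulation in the loop above is a plain counting-dict pattern; the same logic reads more directly with collections.defaultdict (a behavior-equivalent sketch of that loop, reusing the same names, not the project's code):

    from collections import defaultdict

    dt_group_sizes = defaultdict(int)
    for dtype in linked_datatypes:
        if dtype.disk_size is not None:
            disk_size += dtype.disk_size
            if dtype.fk_datatype_group:
                # Missing keys default to 0, so no explicit membership test is needed.
                dt_group_sizes[dtype.fk_datatype_group] += dtype.disk_size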
Example #12
    def test_tvb_export_for_datatype_group(self, datatype_group_factory):
        """
        This method checks export of a data type group
        """
        datatype_group = datatype_group_factory(project=self.test_project)
        file_name, file_path, _ = self.export_manager.export_data(datatype_group, self.TVB_EXPORTER, self.test_project)

        assert file_name is not None, "Export process should return a file name"
        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(file_path), "Could not find export file: %s on disk." % file_path

        # Now check if the generated file is a correct ZIP file
        assert zipfile.is_zipfile(file_path), "Generated file is not a valid ZIP file"

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if the ZIP file contains files for data types
            assert count_datatypes == len(list_of_files)
Example #13
    def mark_burst_finished(self, burst_entity, burst_status=None, error_message=None):
        """
        Mark Burst status field.
        Also compute 'weight' for current burst: no of operations inside, estimate time on disk...
        
        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and perpetuate the message.
        """
        if burst_status is None:
            burst_status = model.BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = model.BurstConfiguration.BURST_ERROR

        try:
            ### If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(dt_group.id)
                dao.store_entity(dt_group)

            ### Update actual Burst entity fields
            ##  1KB for each dataType, considered for operation.xml files
            linked_ops_number = dao.get_operations_in_burst(burst_entity.id, is_count=True)
            burst_entity.disk_size = linked_ops_number + dao.get_disk_size_for_burst(burst_entity.id)        # In KB
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(burst_entity.id)
            burst_entity.workflows_number = dao.get_workflows_for_burst(burst_entity.id, is_count=True)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            self.logger.exception("Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
Example #14
 def test_tvb_export_for_datatype_group(self):
     """
     This method checks export of a data type group
     """
     datatype_group = self.datatypeFactory.create_datatype_group()       
     file_name, file_path, _ = self.export_manager.export_data(datatype_group, self.TVB_EXPORTER, self.project)
     
     self.assertTrue(file_name is not None, "Export process should return a file name")
     self.assertTrue(file_path is not None, "Export process should return path to export file")
     self.assertTrue(os.path.exists(file_path), "Could not find export file: %s on disk." % file_path)
     
     # Now check if the generated file is a correct ZIP file
     self.assertTrue(zipfile.is_zipfile(file_path), "Generated file is not a valid ZIP file")
     
     with closing(zipfile.ZipFile(file_path)) as zip_file:
         list_of_files = zip_file.namelist()
 
         count_datatypes = dao.count_datatypes_in_group(datatype_group.id)
         
         # Check if the ZIP file contains files for data types + operation
         self.assertEqual(count_datatypes * 2, len(list_of_files), 
                          "Should have 2 x nr datatypes files, one for operations one for datatypes")
Example #15
    def mark_burst_finished(self,
                            burst_entity,
                            error=False,
                            success=False,
                            cancel=False,
                            error_message=None):
        """
        Mark Burst status field.
        Also compute 'weight' for current burst: no of operations inside, estimate time on disk...
        
        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param error: When True, burst will be marked as finished with error.
        :param success: When True, burst will be marked successfully.
        :param cancel: When True, burst will be marked as user-canceled.
        """
        try:
            linked_ops_number = dao.get_operations_in_burst(burst_entity.id,
                                                            is_count=True)
            linked_datatypes = dao.get_generic_entity(model.DataType,
                                                      burst_entity.id,
                                                      "fk_parent_burst")

            disk_size = linked_ops_number  #### 1KB for each dataType, considered for operation.xml files
            dt_group_sizes = dict()
            for dtype in linked_datatypes:
                if dtype.disk_size is not None:
                    disk_size = disk_size + dtype.disk_size
                    ### Prepare and compute DataTypeGroup sizes, in case of ranges.
                    if dtype.fk_datatype_group:
                        previous_group_size = dt_group_sizes.get(dtype.fk_datatype_group, 0)
                        dt_group_sizes[dtype.fk_datatype_group] = previous_group_size + dtype.disk_size

            ### If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup,
                                                     burst_entity.id,
                                                     "fk_parent_burst")
            if len(burst_dt_groups) > 0:
                for dt_group in burst_dt_groups:
                    dt_group.count_results = dao.count_datatypes_in_group(
                        dt_group.id)
                    dt_group.disk_size = dt_group_sizes[dt_group.id] if (
                        dt_group.id in dt_group_sizes) else 0
                    dao.store_entity(dt_group)

            ### Update actual Burst entity fields
            burst_entity.disk_size = disk_size  ## In KB
            burst_entity.datatypes_number = len(linked_datatypes)
            burst_entity.workflows_number = len(
                dao.get_workflows_for_burst(burst_entity.id))
            burst_entity.mark_status(success=success,
                                     error=error,
                                     cancel=cancel)
            burst_entity.error_message = error_message

            dao.store_entity(burst_entity)
        except Exception as excep:
            self.logger.error(excep)
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.mark_status(error=True)
            burst_entity.error_message = "Error when updating Burst Status"
            dao.store_entity(burst_entity)
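A hypothetical call site for this flag-based signature, e.g. when a simulation launch fails (burst_service and burst_entity are assumed names, not from the examples):

    # Mark the burst as failed and record the reason; exactly one of the
    # error/success/cancel flags is expected to be True.
    burst_service.mark_burst_finished(burst_entity, error=True,
                                      error_message="Simulation operation failed.")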