def test_datatypes_groups(self):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
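The tuple indexing above reads more easily with names. A hedged unpacking sketch follows; the row layout (first operation id, last operation id, operation count, group id) is inferred from how these tests index the row, not from a documented schema:

    # Hypothetical named unpacking of one row from dao.get_filtered_operations.
    first_op_id, last_op_id, op_count, op_group_id = all_operations[0][:4]
    assert op_count == 2               # two operations were fired for the range
    assert op_group_id is not None     # both belong to the same OperationGroup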
Example No. 2
    def test_datatypes_groups(self):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example No. 3
    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Given this input, the method should:

            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst using the tab_index and index_in_tab
              fields saved on each workflow step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst,
                                                       burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, so we need
            # to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(
                    operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid
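The third docstring step is the interesting one. Below is a minimal sketch of how the saved coordinates could drive tab reconstruction; the burst.tabs/portlets layout is an assumption for illustration, and __populate_tabs_from_workflow is the real implementation:

    # Hypothetical sketch: place visualization steps back on the burst tabs
    # using the tab_index / index_in_tab fields mentioned in the docstring.
    def _place_visualization_steps(burst, workflow_steps):
        for step in workflow_steps:
            burst.tabs[step.tab_index].portlets[step.index_in_tab] = step  # assumed layout
        return burst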
Example No. 4
    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded
        
        Given this input, the method should:

            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst using the tab_index and index_in_tab
              fields saved on each workflow step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, so we need
            # to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid
Example No. 5
    def test_datatypes_groups(self):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example No. 6
    def import_operation(operation_entity, migration=False):
        """
        Store an Operation entity.
        """
        do_merge = False
        if operation_entity.id:
            do_merge = True
        operation_entity = dao.store_entity(operation_entity, merge=do_merge)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            datatype_group = dao.get_datatypegroup_by_op_group_id(
                operation_group_id)

            if datatype_group is None and migration is False:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
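A hedged call-site sketch of the merge-versus-insert behavior above, assuming the method is exposed as a static function, op is a freshly built Operation without an id, and stored_op is one re-read from the DB (all three names are illustrative):

    # Hypothetical usage: a new entity is inserted, a persisted one is merged.
    new_op, dt_group = import_operation(op)                         # op.id is None -> insert
    old_op, dt_group = import_operation(stored_op, migration=True)  # id set -> merge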
Example No. 7
    def prepare_indexes_for_simulation_results(self, operation, result_filenames, burst):
        indexes = list()
        self.logger.debug("Preparing indexes for simulation results in operation {}...".format(operation.id))
        for filename in result_filenames:
            try:
                self.logger.debug("Preparing index for filename: {}".format(filename))
                index = h5.index_for_h5_file(filename)()
                h5_class = h5.REGISTRY.get_h5file_for_index(type(index))

                with h5_class(filename) as index_h5:
                    index.fill_from_h5(index_h5)
                    index.fill_from_generic_attributes(index_h5.load_generic_attributes())

                index.fk_parent_burst = burst.gid
                index.fk_from_operation = operation.id
                if operation.fk_operation_group:
                    datatype_group = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group)
                    self.logger.debug(
                        "Found DatatypeGroup with id {} for operation {}".format(datatype_group.id, operation.id))
                    index.fk_datatype_group = datatype_group.id

                    # Update the operation group name
                    operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
                    operation_group.fill_operationgroup_name("TimeSeriesRegionIndex")
                    dao.store_entity(operation_group)
                self.logger.debug(
                    "Prepared index {} for file {} in operation {}".format(index.summary_info, filename, operation.id))
                indexes.append(index)
            except Exception as e:
                self.logger.debug("Skip preparing index {} because there was an error.".format(filename))
                self.logger.error(e)
        self.logger.debug("Prepared {} indexes for results in operation {}...".format(len(indexes), operation.id))
        return indexes
Example No. 8
    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        # TODO: re-write this to use groups correctly
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        algo = test_adapter_factory(TestAdapter3)
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
Example No. 9
    def prepare_indexes_for_simulation_results(self, operation, result_filenames, burst):
        indexes = list()
        self.logger.debug("Preparing indexes for simulation results in operation {}...".format(operation.id))
        for filename in result_filenames:
            index = h5.index_for_h5_file(filename)()
            # TODO: don't load full TS in memory and make this read nicer
            datatype, ga = h5.load_with_references(filename)
            index.fill_from_has_traits(datatype)
            index.fill_from_generic_attributes(ga)
            index.fk_parent_burst = burst.gid
            index.fk_from_operation = operation.id
            if operation.fk_operation_group:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group)
                self.logger.debug(
                    "Found DatatypeGroup with id {} for operation {}".format(datatype_group.id, operation.id))
                index.fk_datatype_group = datatype_group.id
            self.logger.debug(
                "Prepared index {} for file {} in operation {}".format(index.summary_info, filename, operation.id))
            indexes.append(index)
        self.logger.debug("Prepared {} indexes for results in operation {}...".format(len(indexes), operation.id))
        return indexes
Example No. 10
    def test_get_visualizers_for_group(self):

        _, op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Only the discrete PSE adapter is expected
        self.assertEqual(1, len(result))
        self.assertEqual(DISCRETE_PSE_ADAPTER_CLASS, result[0].classname)
Example No. 11
    def test_get_visualizers_for_group(self):

        _, op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Only the discrete PSE adapter is expected
        assert 1 == len(result)
        assert IntrospectionRegistry.DISCRETE_PSE_ADAPTER_CLASS == result[0].classname
Example No. 12
    def test_get_visualizers_for_group(self):

        _, op_group_id = TestFactory.create_group(self.test_user,
                                                  self.test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Only the discrete PSE adapter is expected
        self.assertEqual(1, len(result))
        self.assertEqual(DISCRETE_PSE_ADAPTER_CLASS, result[0].classname)
Example No. 13
    def test_get_visualizers_for_group(self, datatype_group_factory):

        group = datatype_group_factory()
        dt_group = dao.get_datatypegroup_by_op_group_id(group.fk_from_operation)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Only the discrete PSE adapter is expected
        assert 1 == len(result)
        assert IntrospectionRegistry.DISCRETE_PSE_ADAPTER_CLASS == result[0].classname
Example No. 14
    def test_get_visualizers_for_group(self, datatype_group_factory):

        group = datatype_group_factory()
        dt_group = dao.get_datatypegroup_by_op_group_id(group.fk_from_operation)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Both discrete and isocline are expected due to the 2 ranges set in the factory
        assert 2 == len(result)
        result_classnames = [res.classname for res in result]
        assert IntrospectionRegistry.ISOCLINE_PSE_ADAPTER_CLASS in result_classnames
        assert IntrospectionRegistry.DISCRETE_PSE_ADAPTER_CLASS in result_classnames
Example No. 15
    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct metadata and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size on disk, in case file-storage is used
            if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)
Example No. 16
    def _remove_project_node_files(self, project_id, gid, links, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem it will raise a StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)

            if links:
                op = dao.get_operation_by_id(datatype.fk_from_operation)
                # Instead of deleting, we copy the datatype to the linked project
                # We also clone the operation
                new_operation = self.__copy_linked_datatype_before_delete(op, datatype, project,
                                                                          links[0].fk_to_project)

                # If there is a datatype group and an operation group, and they were not yet
                # moved to the linked project, then do it
                if datatype.fk_datatype_group is not None:
                    dt_group_op = dao.get_operation_by_id(datatype.fk_from_operation)
                    op_group = dao.get_operationgroup_by_id(dt_group_op.fk_operation_group)
                    op_group.fk_launched_in = links[0].fk_to_project
                    dao.store_entity(op_group)

                    burst = dao.get_burst_for_operation_id(op.id)
                    if burst is not None:
                        burst.fk_project = links[0].fk_to_project
                        dao.store_entity(burst)

                    dt_group = dao.get_datatypegroup_by_op_group_id(op_group.id)
                    dt_group.parent_operation = new_operation
                    dt_group.fk_from_operation = new_operation.id
                    dao.store_entity(dt_group)

            else:
                # There is no link for this datatype so it has to be deleted
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)

                # Remove burst if dt has one and it still exists
                if datatype.fk_parent_burst is not None and datatype.is_ts:
                    burst = dao.get_burst_for_operation_id(datatype.fk_from_operation)

                    if burst is not None:
                        dao.remove_entity(BurstConfiguration, burst.id)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")
Example No. 17
    def _create_datatype_group(self):
        """
        Creates a project and one DataTypeGroup with 2 DataTypes in the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")
        
        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]
Example No. 18
    def _create_datatype_group(self):
        """
        Creates a project and one DataTypeGroup with 2 DataTypes in the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        assert 0 == all_operations, "There should be no operation."

        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]
Example No. 19
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct metadata and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

        count_stored = 0
        group_type = None  # For a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            res.subject = self.generic_attributes.subject
            res.state = self.generic_attributes.state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            res.user_tag_1 = self.generic_attributes.user_tag_1
            res.user_tag_2 = self.generic_attributes.user_tag_2
            res.fk_datatype_group = data_type_group_id
            # Compute size on disk, in case file-storage is used
            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            dao.store_entity(res)
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Example No. 20
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct metadata and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id

        count_stored = 0
        if result is None:
            return "", count_stored

        group_type = None  # For a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            if not res.fixed_generic_attributes:
                res.fill_from_generic_attributes(self.generic_attributes)
            res.fk_from_operation = self.operation_id
            res.fk_datatype_group = data_type_group_id

            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                if not res.fixed_generic_attributes:
                    with H5File.from_file(associated_file) as f:
                        f.store_generic_attributes(self.generic_attributes)
                # Compute size on disk, in case file-storage is used
                res.disk_size = self.storage_interface.compute_size_on_disk(
                    associated_file)

            dao.store_entity(res)
            res.after_store()
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Example No. 21
    def prepare_index_for_metric_result(self, operation, result_filename, burst):
        self.logger.debug("Preparing index for metric result in operation {}...".format(operation.id))
        index = h5.index_for_h5_file(result_filename)()
        with DatatypeMeasureH5(result_filename) as dti_h5:
            index.gid = dti_h5.gid.load().hex
            index.metrics = json.dumps(dti_h5.metrics.load())
            index.fk_source_gid = dti_h5.analyzed_datatype.load().hex
        index.fk_from_operation = operation.id
        index.fk_parent_burst = burst.gid
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group)
        self.logger.debug("Found DatatypeGroup with id {} for operation {}".format(datatype_group.id, operation.id))
        index.fk_datatype_group = datatype_group.id
        self.logger.debug("Prepared index {} for results in operation {}...".format(index.summary_info, operation.id))
        return index
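A hedged call-site sketch for the method above, assuming burst_service holds it and result_h5_path points at the H5 file produced by a completed metric operation (both names are illustrative):

    # Hypothetical usage after a metric operation finishes inside a group launch.
    index = burst_service.prepare_index_for_metric_result(metric_op, result_h5_path, burst)
    dao.store_entity(index)  # persist the measure index linked to its DataTypeGroup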
Example No. 22
    def test_datatypes_groups(self, test_adapter_factory,
                              datatype_group_factory):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 0, "There should be no operation"

        dt_group = datatype_group_factory(project=self.test_project)
        model = TestModel()
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

        operations = dao.get_operations_in_group(dt_group.id)

        for op in operations:
            model.gid = uuid.uuid4()
            op_path = StorageInterface().get_project_folder(
                self.test_project.name, str(op.id))
            op.view_model_gid = model.gid.hex
            op.algorithm = adapter.stored_adapter
            h5.store_view_model(model, op_path)
            dao.store_entity(op)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 2, "Expected two operation groups"
        assert all_operations[0][2] == 6, "Expected 6 operations in one group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[1][0])
        self.operation_service.stop_operation(all_operations[1][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[1][0], False)
        self.operation_service.launch_operation(all_operations[1][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
Example No. 23
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = ADAPTERS['Upload']['defaultdatastate']
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
Example No. 24
    def _create_operations_with_inputs(self,
                                       datatype_group,
                                       is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True then a new group will be created and one of its entries
        will be used as input for the returned operations.
        """
        group_dts = dao.get_datatypes_from_datatype_group(datatype_group.id)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = self._create_value_wrapper(self.test_user,
                                                      self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(
                TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])

        # groups
        ops_group = dao.get_operations_in_group(
            datatype_group.fk_from_operation)
        assert 9 == len(ops_group)
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(datatype_group.id)
            return ops, dt_group.id
        return ops, datatype_gid
Example No. 26
    def __import_operation(operation_entity):
        """
        Store a Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
Example No. 27
    def prepare_datatypes_for_export(data):
        """
        Method used for exporting DataType groups. This method returns a list of all datatype indexes
        that need to be exported and a dictionary where keys are operation folder names and the values
        are lists containing the paths that belong to each operation folder.
        """
        all_datatypes = ProjectService.get_all_datatypes_from_data(data)
        first_datatype = all_datatypes[0]

        # We are exporting a group of datatype measures so we need to find the group of time series
        if hasattr(first_datatype, 'fk_source_gid'):
            ts = h5.load_entity_by_gid(first_datatype.fk_source_gid)
            dt_metric_group = dao.get_datatypegroup_by_op_group_id(
                ts.parent_operation.fk_operation_group)
            datatype_measure_list = ProjectService.get_all_datatypes_from_data(
                dt_metric_group)
            all_datatypes = datatype_measure_list + all_datatypes
        else:
            ts_group = dao.get_datatype_measure_group_from_ts_from_pse(
                first_datatype.gid, DatatypeMeasureIndex)
            time_series_list = ProjectService.get_all_datatypes_from_data(
                ts_group)
            all_datatypes = all_datatypes + time_series_list

        if all_datatypes is None or len(all_datatypes) == 0:
            raise ExportException(
                "Could not export a data type group with no data!")

        op_file_dict = dict()
        for dt in all_datatypes:
            h5_path = h5.path_for_stored_index(dt)
            op_folder = os.path.dirname(h5_path)
            op_file_dict[op_folder] = [h5_path]

            op = dao.get_operation_by_id(dt.fk_from_operation)
            vms = h5.gather_references_of_view_model(op.view_model_gid,
                                                     os.path.dirname(h5_path),
                                                     only_view_models=True)
            op_file_dict[op_folder].extend(vms[0])

        return all_datatypes, op_file_dict
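A hedged sketch of consuming the return value when zipping an export; the zipfile usage here is illustrative, not the framework's actual exporter:

    import os
    import zipfile

    def zip_exported_group(all_datatypes, op_file_dict, archive_path):
        # op_file_dict maps an operation folder to the H5 paths inside it,
        # as documented by prepare_datatypes_for_export above.
        with zipfile.ZipFile(archive_path, "w") as archive:
            for op_folder, h5_paths in op_file_dict.items():
                for path in h5_paths:
                    arcname = os.path.join(os.path.basename(op_folder), os.path.basename(path))
                    archive.write(path, arcname)
        return len(all_datatypes)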
Example No. 28
    def test_datatypes_groups(self):
        """
        Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 0, "There should be no operation"

        adapter_instance = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user,
                                    self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
Example No. 29
    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True then a new group will be created and one of its entries
        will be used as input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])
            
        # groups
        _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        self.assertEqual(2, len(ops_group))
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid
Example No. 30
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup linked to the operation group with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)
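Since this is a thin delegation to the DAO layer, callers only need the operation group id. A hedged usage sketch, where project_service as the owning service instance is an assumption:

    # Hypothetical usage: resolve the DataTypeGroup aggregating a PSE's results.
    dt_group = project_service.get_datatypegroup_by_op_group_id(operation_group.id)
    print(dt_group.id, dt_group.gid)  # attributes used throughout these examples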
Example No. 31
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
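The pages_no computation above is ceiling division written with integer operators. A minimal standalone check of that formula (the page size value is illustrative, not the framework constant):

    OPERATIONS_PAGE_SIZE = 20  # illustrative value

    def pages_for(total_filtered):
        # Equivalent to math.ceil(total_filtered / OPERATIONS_PAGE_SIZE)
        return total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)

    assert pages_for(0) == 0
    assert pages_for(20) == 1
    assert pages_for(21) == 2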
Example No. 32
    def import_list_of_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, None if is_group else importer_operation_id)

        if is_group and len(ordered_operations) > 0:
            first_op = dao.get_operation_by_id(importer_operation_id)
            vm_path = h5.determine_filepath(first_op.view_model_gid,
                                            os.path.dirname(import_path))
            os.remove(vm_path)

            ordered_operations[0].operation.id = importer_operation_id

        for operation_data in ordered_operations:
            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.storage_interface.get_project_folder(
                    project.name, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                operation_data.operation.create_date = datetime.now()
                operation_data.operation.start_date = datetime.now()
                operation_data.operation.completion_date = datetime.now()

                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    imported_operations.append(operation_entity)
                    new_op_folder = self.storage_interface.get_project_folder(
                        project.name, str(operation_entity.id))
                    view_model_disk_size = 0
                    for h5_file in operation_data.all_view_model_files:
                        view_model_disk_size += StorageInterface.compute_size_on_disk(
                            h5_file)
                        shutil.move(h5_file, new_op_folder)
                    operation_entity.view_model_disk_size = view_model_disk_size
                    dao.store_entity(operation_entity)
                except MissingReferenceException as excep:
                    self.storage_interface.remove_operation_data(
                        project.name, operation_entity.id)
                    operation_entity.fk_operation_group = None
                    dao.store_entity(operation_entity)
                    dao.remove_entity(DataTypeGroup, dt_group.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

            # We want importer_operation_id to be kept just for the first operation (the first iteration)
            if is_group:
                importer_operation_id = None

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
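A hedged call-site sketch for the importer above; import_service as the owning service instance and the unpacked folder path are illustrative assumptions:

    # Hypothetical invocation after unpacking an exported project archive.
    imported_ops, dt_count, stored_count = import_service.import_list_of_operations(
        project, "/tmp/unpacked_project", is_group=False)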
Example No. 33
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)

        if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE:
            end_idx = OPERATIONS_PAGE_SIZE
        else:
            end_idx = total_filtered - start_idx

        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx)
        started_ops = 0
        if current_ops is None:
            return selected_project, [], 0
        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        datatype = dao.get_datatype_by_id(datatype_group.id)
                        result["datatype_group_gid"] = datatype.gid
                        result["gid"] = operation_group.gid
                        
                        all_categs = dao.get_algorithm_categories()
                        view_categ = dao.get_visualisers_categories()[0]
                        excludes = [categ.id for categ in all_categs if categ.id != view_categ.id]
                        launchers = self.retrieve_launchers("DataTypeGroup", datatype.gid,
                                                            exclude_categories=excludes).values()[0]

                        view_groups = []
                        for algo in launchers.values():
                            url = '/flow/' + str(algo['category']) + '/' + str(algo['id'])
                            if algo['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \
                                      str(algo['category']) + '/' + str(algo['id'])
                            view_groups.append(dict(name=algo["displayName"],
                                                    url=url,
                                                    param_name=algo['children'][0]['param_name'],
                                                    part_of_group=algo['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception as excep:
                        self.logger.error(excep)
                        self.logger.warning("Will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = timedelta2string(result["complete"] - result["start"])
                result["status"] = one_op[10]
                if result["status"] == model.STATUS_STARTED:
                    started_ops += 1
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name,
                                                                                 figure.operation.id)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser 
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, started_ops, operations, pages_no
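
A side note on the paging arithmetic: the pages_no expression above is integer ceiling division. A self-contained equivalent, with no TVB dependencies:

def count_pages(total_items, page_size):
    # Same value as: total_items // page_size + (1 if total_items % page_size else 0)
    return -(-total_items // page_size)  # ceiling division on integers

assert count_pages(0, 20) == 0
assert count_pages(20, 20) == 1
assert count_pages(21, 20) == 2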
Example no. 34
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False, existing_dt_links=None):
        """
        Method used for removing a dataType. If the given dataType is a DataTypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        if datatype.parent_operation.fk_launched_in != int(project_id):
            self.logger.warning("Datatype with GUID [%s] has been moved to another project and does "
                                "not need to be deleted anymore." % datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        new_dt_links = []

        # Datatype Groups were already handled when the first DatatypeMeasureIndex was found
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        # Found the first DatatypeMeasureIndex from a group
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            # We load it this way to make sure we have the 'fk_operation_group' in every case
            datatype_group_gid = dao.get_datatype_by_id(datatype.fk_datatype_group).gid
            datatype_group = h5.load_entity_by_gid(datatype_group_gid)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)

            datatypes = self.get_all_datatypes_from_data(datatype_group)
            first_datatype = datatypes[0]

            if hasattr(first_datatype, 'fk_source_gid'):
                ts = h5.load_entity_by_gid(first_datatype.fk_source_gid)
                ts_group = dao.get_datatypegroup_by_op_group_id(ts.parent_operation.fk_operation_group)
                dm_group = datatype_group
            else:
                dt_measure_index = get_class_by_name("{}.{}".format(DATATYPE_MEASURE_INDEX_MODULE,
                                                                    DATATYPE_MEASURE_INDEX_CLASS))
                dm_group = dao.get_datatype_measure_group_from_ts_from_pse(first_datatype.gid, dt_measure_index)
                ts_group = datatype_group

            links = []

            if ts_group:
                links.extend(dao.get_links_for_datatype(ts_group.id))
                correct = correct and self._remove_operation_group(ts_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if dm_group:
                links.extend(dao.get_links_for_datatype(dm_group.id))
                correct = correct and self._remove_operation_group(dm_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if len(links) > 0:
                # We want to get the links for the first TSIndex directly
                # This code works for all cases
                datatypes = dao.get_datatype_in_group(ts_group.id)
                ts = datatypes[0]

                new_dt_links = self._add_links_for_datatype_references(ts, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

        else:
            self.logger.debug("Removing datatype %s" % datatype)
            links = dao.get_links_for_datatype(datatype.id)

            if len(links) > 0:
                new_dt_links = self._add_links_for_datatype_references(datatype, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

            self._remove_project_node_files(project_id, datatype.gid, links, skip_validation)

        # Remove the Operation entities in case no other DataType references them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove the Operation while DataTypes still reference it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.storage_interface.remove_operation_data(project.name, operation_id)

        self.storage_interface.push_folder_to_sync(project.name)
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
        return new_dt_links
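
A hedged usage sketch for remove_datatype; the import paths below are assumptions based on the TVB package layout and may differ between releases:

# Assumed import locations; adjust to the TVB version in use.
from tvb.core.services.project_service import ProjectService
from tvb.core.services.exceptions import RemoveDataTypeException

def delete_datatype(project_id, datatype_gid):
    service = ProjectService()
    try:
        # If datatype_gid belongs to a DataTypeGroup, the entire group and
        # the operations that created it are removed (see the method above).
        return service.remove_datatype(project_id, datatype_gid)
    except RemoveDataTypeException:
        # At least one entity could not be removed; log or re-raise as needed.
        raise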
Example no. 35
    def import_project_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, importer_operation_id)

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    # TODO: TVB-2849 review these flags and simplify the condition
                    if stored_dts_count > 0 or (
                            not operation_data.is_self_generated and
                            not is_group) or importer_operation_id is not None:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        view_model_disk_size = 0
                        for h5_file in operation_data.all_view_model_files:
                            view_model_disk_size += FilesHelper.compute_size_on_disk(
                                h5_file)
                            shutil.move(h5_file, new_op_folder)
                        operation_entity.view_model_disk_size = view_model_disk_size
                        dao.store_entity(operation_entity)
                    else:
                        # In case all DTs under the current operation were links and the
                        # ViewModel is a dummy, don't keep the empty Operation in the DB
                        dao.remove_entity(Operation, operation_entity.id)
                        self.files_helper.remove_operation_data(
                            project.name, operation_entity.id)
                except MissingReferenceException as excep:
                    dao.remove_entity(Operation, operation_entity.id)
                    self.files_helper.remove_operation_data(
                        project.name, operation_entity.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
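
For context, a minimal sketch of driving the importer above; 'service' and 'project' are placeholders for the object exposing import_project_operations and an already-stored Project entity:

def run_import(service, project, import_path):
    # Returns the imported Operation entities; the two counters report how
    # many DataTypes were found versus actually stored (links/duplicates skipped).
    imported_ops, dts_found, dts_stored = service.import_project_operations(project, import_path)
    if dts_found != dts_stored:
        print('Stored %d of %d datatypes' % (dts_stored, dts_found))
    return imported_ops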
Example no. 36
    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup linked to the operation group with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)
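
Usage is a one-liner; a small sketch that also guards against a missing group (the dao call may return None, as the checks in the other examples assume):

def find_group_gid(operation_group_id):
    # Returns the gid of the DataTypeGroup tied to an operation group,
    # or None when no such group has been stored yet.
    dt_group = get_datatypegroup_by_op_group_id(operation_group_id)
    return dt_group.gid if dt_group is not None else None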
Example no. 37
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[14]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid

                        ## Filter only viewers for current DataTypeGroup entity:
                        launchers = self.retrieve_launchers(datatype_group.gid,
                                                            include_categories=[view_categ_id]).values()[0]
                        view_groups = []
                        for launcher in launchers.values():
                            url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                            if launcher['part_of_group']:
                                url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                      str(launcher['category']) + '/' + str(launcher['id'])
                            view_groups.append(dict(name=launcher["displayName"],
                                                    url=url,
                                                    param_name=launcher['children'][0]['param_name'],
                                                    part_of_group=launcher['part_of_group']))
                        result["view_groups"] = view_groups

                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["method"] = one_op[5]
                result["user"] = dao.get_user_by_id(one_op[6])
                if type(one_op[7]) in (str, unicode):
                    result["create"] = string2date(str(one_op[7]))
                else:
                    result["create"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["start"] = string2date(str(one_op[8]))
                else:
                    result["start"] = one_op[8]
                if type(one_op[9]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[9]))
                else:
                    result["complete"] = one_op[9]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[10]
                result["additional"] = one_op[11]
                result["visible"] = True if one_op[12] > 0 else False
                result['operation_tag'] = one_op[13]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type,
                                                                dt.gid, 'gid')[0] for dt in datatype_results]
                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
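
The viewer URLs assembled in the loop above follow a fixed scheme; extracting it into a pure function makes the two branches explicit. A sketch mirroring the snippet, not a TVB API:

def build_launch_url(category_id, algo_id, part_of_group, datatype_group_gid):
    # Group launches go through /flow/prepare_group_launch/<gid>/<category>/<algorithm>;
    # single launches go straight to /flow/<category>/<algorithm>.
    if part_of_group:
        return '/flow/prepare_group_launch/%s/%s/%s' % (datatype_group_gid, category_id, algo_id)
    return '/flow/%s/%s' % (category_id, algo_id)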
Example no. 38
    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size on disk, in case file storage is used
            if hasattr(res, 'storage_path') and hasattr(
                    res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path,
                                               res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)
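
Note the del result[0:len(result)] / extend pair near the end: it replaces the list contents in place, so the caller's reference to result still sees the stored entities. A minimal illustration of the pattern:

def replace_in_place(target_list, new_items):
    # Mutate the existing list object instead of rebinding the name,
    # so every holder of a reference observes the new contents.
    del target_list[:]
    target_list.extend(new_items)

results = [1, 2, 3]
alias = results
replace_in_place(results, ['a', 'b'])
assert alias == ['a', 'b']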
Example no. 39
    def retrieve_project_full(self,
                              project_id,
                              applied_filters=None,
                              current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(
            project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
            1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                                  start_idx,
                                                  OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                operation_group_id = one_op[3]
                if operation_group_id is not None and operation_group_id:
                    try:
                        operation_group = dao.get_generic_entity(
                            OperationGroup, operation_group_id)[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            operation_group_id)
                        result["datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:

                        if datatype_group is None:
                            view_groups = None
                        else:
                            view_groups = AlgorithmService().get_visualizers_for_group(datatype_group.gid)
                        result["view_groups"] = view_groups
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                if not result['group']:
                    result['results'] = dao.get_results_for_operation(
                        result['id'])
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
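
All three retrieve_project_full variants normalize timestamps that may arrive either as datetime objects or as serialized strings. A self-contained sketch of that pattern; the format string is an assumption standing in for whatever string2date expects:

from datetime import datetime

def ensure_datetime(value, fmt='%Y-%m-%d %H:%M:%S.%f'):
    # Hypothetical stand-in for string2date: DB backends may hand back
    # either datetime objects or their string serialization.
    if isinstance(value, str):
        return datetime.strptime(value, fmt)
    return value

assert ensure_datetime(datetime(2020, 1, 1)) == datetime(2020, 1, 1)
assert ensure_datetime('2020-01-01 00:00:00.000000') == datetime(2020, 1, 1)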