def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        try:
            dao.get_generic_entity(DataTypeGroup, datatype_group_id)
            raise AssertionError("The DataTypeGroup entity was not removed.")
        except Exception:
            pass

        try:
            dao.get_operationgroup_by_id(operation_group_id)
            raise AssertionError("The OperationGroup entity was not removed.")
        except Exception:
            pass
Example #2
    def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        try:
            dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
            self.fail("The DataTypeGroup entity was not removed.")
        except Exception:
            pass

        try:
            dao.get_operationgroup_by_id(operation_group_id)
            self.fail("The OperationGroup entity was not removed.")
        except Exception:
            pass
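
Note: In both variants above, the bare `except Exception: pass` will also swallow unrelated failures (e.g. a typo in the DAO call). A minimal sketch of a tighter pattern, assuming a pytest-style suite and a hypothetical `EntityNotFoundError` raised by the lookup:

import pytest

class EntityNotFoundError(Exception):
    """Hypothetical error a DAO lookup might raise for a missing entity."""

def get_entity_or_raise(store, entity_id):
    # Stand-in for dao.get_generic_entity: fail loudly when the entity is gone.
    if entity_id not in store:
        raise EntityNotFoundError(entity_id)
    return store[entity_id]

def test_entity_removed():
    store = {}  # the entity was deleted, so the lookup must raise
    with pytest.raises(EntityNotFoundError):
        get_entity_or_raise(store, 42)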
Example #3
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
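
Note: The if/elif chain above encodes "the highest populated range wins". A dependency-free sketch of the same rule (names are illustrative, not the TVB API):

def count_ranges(range1, range2, range3):
    """Mirror the migration's branching: range3 set implies 3 ranges,
    else range2 implies 2, else range1 implies 1, else 0."""
    if range3 is not None:
        return 3
    if range2 is not None:
        return 2
    if range1 is not None:
        return 1
    return 0

assert count_ranges("0.1,0.9", "a,b", None) == 2
assert count_ranges(None, None, None) == 0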
Example #4
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We assume there are at most 2 ranges, and that each operation produces exactly one DataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: a list of `DataTypeMeasure` which have been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which have been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`

        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range1)
        range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if range2_name is not None:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
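
Note: The two-step guard that populates `final_dict` above is the classic nested-dict insert; `dict.setdefault` expresses it in one line. A tiny equivalent sketch:

def put(final_dict, key_1, key_2, node_info):
    # Same effect as: if key_1 not in final_dict: final_dict[key_1] = {} ...
    final_dict.setdefault(key_1, {})[key_2] = node_info

final_dict = {}
put(final_dict, "0.1", "0.5", "node-a")
put(final_dict, "0.1", "0.7", "node-b")
assert final_dict == {"0.1": {"0.5": "node-a", "0.7": "node-b"}}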
Example #5
    def import_operation(operation_entity, migration=False):
        """
        Store an Operation entity.
        """
        do_merge = False
        if operation_entity.id:
            do_merge = True
        operation_entity = dao.store_entity(operation_entity, merge=do_merge)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            datatype_group = dao.get_datatypegroup_by_op_group_id(
                operation_group_id)

            if datatype_group is None and migration is False:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
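
Note: `import_operation` above decides between insert and merge purely on whether the entity already carries a database id. A toy sketch of that upsert decision, independent of TVB's dao (the in-memory `rows` dict is a hypothetical stand-in for the database):

def store(rows, entity_id, payload):
    """Insert when entity_id is falsy, merge (overwrite) when it is set."""
    if entity_id:
        rows[entity_id] = payload          # merge path
        return entity_id
    new_id = max(rows, default=0) + 1      # insert path: allocate an id
    rows[new_id] = payload
    return new_id

rows = {}
op_id = store(rows, None, "operation-v1")            # insert
assert store(rows, op_id, "operation-v2") == op_id   # merge keeps the id
assert rows == {1: "operation-v2"}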
Example #6
    def create_view_model(self,
                          operation_entity,
                          operation_data,
                          new_op_folder,
                          generic_attributes=None,
                          add_params=None):
        view_model = self._get_new_form_view_model(
            operation_entity, operation_data.info_from_xml)
        if add_params is not None:
            for element in add_params:
                key_attr = getattr(view_model, element[0])
                setattr(key_attr, element[1], element[2])

        view_model.range_values = operation_entity.range_values
        op_group = dao.get_operationgroup_by_id(
            operation_entity.fk_operation_group)
        if op_group:
            view_model.operation_group_gid = uuid.UUID(op_group.gid)
            view_model.ranges = json.dumps(op_group.range_references)
            view_model.is_metric_operation = 'DatatypeMeasure' in op_group.name

        if generic_attributes is not None:
            view_model.generic_attributes = generic_attributes
        view_model.generic_attributes.operation_tag = operation_entity.user_group

        h5.store_view_model(view_model, new_op_folder)
        view_model_disk_size = StorageInterface.compute_recursive_h5_disk_usage(
            new_op_folder)
        operation_entity.view_model_disk_size = view_model_disk_size
        operation_entity.view_model_gid = view_model.gid.hex
        dao.store_entity(operation_entity)
        return view_model
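
Note: `add_params` above is a list of `(attribute, field, value)` triples applied through `getattr`/`setattr`, so callers can patch nested view-model fields without knowing the concrete class. A small self-contained sketch of that dynamic assignment (class and field names are made up):

class Connectivity:
    pass

class ViewModel:
    def __init__(self):
        self.connectivity = Connectivity()

def apply_params(view_model, add_params):
    """add_params: iterable of (attr_name, field_name, value) triples."""
    for attr_name, field_name, value in add_params:
        target = getattr(view_model, attr_name)
        setattr(target, field_name, value)

vm = ViewModel()
apply_params(vm, [("connectivity", "speed", 3.0)])
assert vm.connectivity.speed == 3.0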
Example #7
    def prepare_indexes_for_simulation_results(self, operation, result_filenames, burst):
        indexes = list()
        self.logger.debug("Preparing indexes for simulation results in operation {}...".format(operation.id))
        for filename in result_filenames:
            try:
                self.logger.debug("Preparing index for filename: {}".format(filename))
                index = h5.index_for_h5_file(filename)()
                h5_class = h5.REGISTRY.get_h5file_for_index(type(index))

                with h5_class(filename) as index_h5:
                    index.fill_from_h5(index_h5)
                    index.fill_from_generic_attributes(index_h5.load_generic_attributes())

                index.fk_parent_burst = burst.gid
                index.fk_from_operation = operation.id
                if operation.fk_operation_group:
                    datatype_group = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group)
                    self.logger.debug(
                        "Found DatatypeGroup with id {} for operation {}".format(datatype_group.id, operation.id))
                    index.fk_datatype_group = datatype_group.id

                    # Update the operation group name
                    operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
                    operation_group.fill_operationgroup_name("TimeSeriesRegionIndex")
                    dao.store_entity(operation_group)
                self.logger.debug(
                    "Prepared index {} for file {} in operation {}".format(index.summary_info, filename, operation.id))
                indexes.append(index)
            except Exception as e:
                self.logger.debug("Skip preparing index {} because there was an error.".format(filename))
                self.logger.error(e)
        self.logger.debug("Prepared {} indexes for results in operation {}...".format(len(indexes), operation.id))
        return indexes
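
Note: Each result file above is indexed inside its own try/except, so one unreadable file is logged and skipped instead of aborting the whole batch. The skeleton of that pattern, with hypothetical names:

import logging

logger = logging.getLogger(__name__)

def prepare_all(filenames, prepare_one):
    """Apply prepare_one to every file; log and skip failures."""
    prepared = []
    for filename in filenames:
        try:
            prepared.append(prepare_one(filename))
        except Exception as e:
            logger.debug("Skip preparing index %s because there was an error.", filename)
            logger.error(e)
    return prepared

assert prepare_all(["good"], lambda f: f.upper()) == ["GOOD"]
assert prepare_all(["bad"], lambda f: 1 / 0) == []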
Example #8
    def update_db_with_results(operation, sim_h5_filenames, metric_operation,
                               metric_h5_filename):
        # type: (Operation, list, Operation, str) -> (str, int)
        """
        Generate corresponding Index entities for the resulted H5 files and insert them in DB.
        """
        burst_service = BurstService()
        index_list = []
        is_group = operation.fk_operation_group is not None
        burst_config = burst_service.get_burst_for_operation_id(operation.id)
        if is_group:
            burst_config = burst_service.get_burst_for_operation_id(
                operation.fk_operation_group, True)
        all_indexes = burst_service.prepare_indexes_for_simulation_results(
            operation, sim_h5_filenames, burst_config)
        if is_group:
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                metric_operation.fk_operation_group)
            operation_group.fill_operationgroup_name("DatatypeMeasureIndex")
            dao.store_entity(operation_group)

            metric_index = burst_service.prepare_index_for_metric_result(
                metric_operation, metric_h5_filename, burst_config)
            all_indexes.append(metric_index)

        for index in all_indexes:
            index = dao.store_entity(index)
            index_list.append(index)

        burst_service.update_burst_status(burst_config)
Example #9
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index, sim_operation.id, 'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category, time_series_index.fk_parent_burst)
        ga.visible = False

        view_model = get_class_by_name("{}.{}".format(MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex, sim_operation.fk_launched_by, sim_operation.fk_launched_in,
                                     metric_algo.id, user_group=ga.operation_tag, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(DataTypeGroup, metric_operation_group_id,
                                                        'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project, view_model)
        return metric_operation
Example #10
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
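
Note: The `fk_datatype_group` filter above (fixed here to use `isnot`) is a classic SQLAlchemy pitfall: writing `Column is not None` is evaluated by Python as an identity test on the column object, which is always True, and never reaches SQL. The NULL check must be spelled `Column.isnot(None)` (or `Column != None`). A self-contained demonstration, assuming SQLAlchemy 1.4+:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Thing(Base):
    __tablename__ = "thing"
    id = Column(Integer, primary_key=True)
    group_id = Column(Integer, nullable=True)

# Plain Python: the column object is simply "not None"; no SQL is produced.
assert (Thing.group_id is not None) is True

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Thing(group_id=None), Thing(group_id=7)])
    session.commit()
    # isnot() emits "group_id IS NOT NULL" and matches only the populated row.
    assert session.query(Thing).filter(Thing.group_id.isnot(None)).count() == 1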
Example #11
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We assume there are at most 2 ranges, and that each operation produces exactly one DataType.
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        _, range1_name, range1_labels = operation_group.load_range_numbers(
            operation_group.range1)
        has_range2, range2_name, range2_labels = operation_group.load_range_numbers(
            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, range1_labels,
                                         range2_labels, color_metric,
                                         size_metric, back_page)
        final_dict = dict()
        operations = dao.get_operations_in_group(operation_group.id)
        for operation_ in operations:
            if operation_.status == model.STATUS_STARTED:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if has_range2:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatype = dao.get_results_for_operation(operation_.id)[0]
                measures = dao.get_generic_entity(DatatypeMeasure,
                                                  datatype.gid,
                                                  '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {
                    key_2: pse_context.build_node_info(operation_, datatype)
                }
            else:
                final_dict[key_1][key_2] = pse_context.build_node_info(
                    operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
Example #12
    def launch(self, datatype_group, **kwargs):
        """
        Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
        instead of only one Matplotlib figure.

        :raises LaunchException: when called before all operations in the group are finished
        """
        if self.PARAM_FIGURE_SIZE in kwargs:
            figsize = kwargs[self.PARAM_FIGURE_SIZE]
            figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
            del kwargs[self.PARAM_FIGURE_SIZE]
        else:
            figsize = (15, 7)

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        _, range1_name, self.range1 = operation_group.load_range_numbers(
            operation_group.range1)
        _, range2_name, self.range2 = operation_group.load_range_numbers(
            operation_group.range2)

        # Get the computed measures on this DataTypeGroup
        first_op = dao.get_operations_in_group(operation_group.id)[0]
        if first_op.status != model.STATUS_FINISHED:
            raise LaunchException(
                "Not all operations from this range are finished. Cannot generate data until then."
            )

        datatype = dao.get_results_for_operation(first_op.id)[0]
        if datatype.type == "DatatypeMeasure":
            ## Load proper entity class from DB.
            dt_measure = dao.get_generic_entity(DatatypeMeasure,
                                                datatype.id)[0]
        else:
            dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid,
                                                '_analyzed_datatype')
            if dt_measure:
                dt_measure = dt_measure[0]

        figure_nrs = {}
        metrics = dt_measure.metrics if dt_measure else []
        for metric in metrics:
            # Separate plot for each metric.
            self._create_plot(metric, figsize, operation_group, range1_name,
                              range2_name, figure_nrs)

        parameters = dict(title=self._ui_name,
                          showFullToolbar=True,
                          serverIp=config.SERVER_IP,
                          serverPort=config.MPLH5_SERVER_PORT,
                          figureNumbers=figure_nrs,
                          metrics=metrics,
                          figuresJSON=json.dumps(figure_nrs))

        return self.build_display_result("pse_isocline/view", parameters)
Example #13
    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size-on disk, in case file-storage is used
            if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)
Example #14
    def from_db(cls, operation_group_id):
        """
        Collects from db the information about the operation group that is required by the isocline view.
        """
        operations = dao.get_operations_in_group(operation_group_id)
        operation_group = dao.get_operationgroup_by_id(operation_group_id)

        self = cls(operation_group.range1, operation_group.range2, {},
                   PseIsoModel._find_metrics(operations), None)

        self._fill_apriori_data(operations)
        return self
Example #15
    def from_db(cls, operation_group_id):
        """
        Collects from db the information about the operation group that is required by the isocline view.
        """
        operations = dao.get_operations_in_group(operation_group_id)
        operation_group = dao.get_operationgroup_by_id(operation_group_id)

        self = cls(operation_group.range1, operation_group.range2, {},
                   PseIsoModel._find_metrics(operations), None)

        self._fill_apriori_data(operations)
        return self
Example #16
    def _remove_project_node_files(self, project_id, gid, links, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)

            if links:
                op = dao.get_operation_by_id(datatype.fk_from_operation)
                # Instead of deleting, we copy the datatype to the linked project
                # We also clone the operation
                new_operation = self.__copy_linked_datatype_before_delete(op, datatype, project,
                                                                          links[0].fk_to_project)

                # If there is a datatype group and an operation group, and they were
                # not yet moved to the linked project, then do it
                if datatype.fk_datatype_group is not None:
                    dt_group_op = dao.get_operation_by_id(datatype.fk_from_operation)
                    op_group = dao.get_operationgroup_by_id(dt_group_op.fk_operation_group)
                    op_group.fk_launched_in = links[0].fk_to_project
                    dao.store_entity(op_group)

                    burst = dao.get_burst_for_operation_id(op.id)
                    if burst is not None:
                        burst.fk_project = links[0].fk_to_project
                        dao.store_entity(burst)

                    dt_group = dao.get_datatypegroup_by_op_group_id(op_group.id)
                    dt_group.parent_operation = new_operation
                    dt_group.fk_from_operation = new_operation.id
                    dao.store_entity(dt_group)

            else:
                # There is no link for this datatype so it has to be deleted
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)

                # Remove burst if dt has one and it still exists
                if datatype.fk_parent_burst is not None and datatype.is_ts:
                    burst = dao.get_burst_for_operation_id(datatype.fk_from_operation)

                    if burst is not None:
                        dao.remove_entity(BurstConfiguration, burst.id)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
Example #17
    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")
Example #18
    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."
Example #19
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

        count_stored = 0
        group_type = None  # For a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            res.subject = self.generic_attributes.subject
            res.state = self.generic_attributes.state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            res.user_tag_1 = self.generic_attributes.user_tag_1
            res.user_tag_2 = self.generic_attributes.user_tag_2
            res.fk_datatype_group = data_type_group_id
            # Compute size-on disk, in case file-storage is used
            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            dao.store_entity(res)
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Example #20
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id

        count_stored = 0
        if result is None:
            return "", count_stored

        group_type = None  # For a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            if not res.fixed_generic_attributes:
                res.fill_from_generic_attributes(self.generic_attributes)
            res.fk_from_operation = self.operation_id
            res.fk_datatype_group = data_type_group_id

            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                if not res.fixed_generic_attributes:
                    with H5File.from_file(associated_file) as f:
                        f.store_generic_attributes(self.generic_attributes)
                # Compute size-on disk, in case file-storage is used
                res.disk_size = self.storage_interface.compute_size_on_disk(
                    associated_file)

            dao.store_entity(res)
            res.after_store()
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Example #21
    def test_set_op_group_visibility(self, datatype_group_factory):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        group = datatype_group_factory()
        list_of_operations = dao.get_operations_in_group(group.id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        op_group = dao.get_operationgroup_by_id(group.id)
        self.project_service.set_operation_and_group_visibility(
            op_group.gid, False, True)
        operations = dao.get_operations_in_group(group.id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."
Example #22
    def launch(self, datatype_group, **kwargs):
        """
        Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
        instead of only one Matplotlib figure.

        :raises LaunchException: when called before all operations in the group are finished
        """
        if self.PARAM_FIGURE_SIZE in kwargs:
            figsize = kwargs[self.PARAM_FIGURE_SIZE]
            figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
            del kwargs[self.PARAM_FIGURE_SIZE]
        else:
            figsize = (15, 7)

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
        _, range1_name, self.range1 = operation_group.load_range_numbers(operation_group.range1)
        _, range2_name, self.range2 = operation_group.load_range_numbers(operation_group.range2)

        for operation in dao.get_operations_in_group(operation_group.id):
            if operation.status == model.STATUS_STARTED:
                raise LaunchException("Can not display until all operations from this range are finished!")

            op_results = dao.get_results_for_operation(operation.id)
            if len(op_results):
                datatype = op_results[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
                else:
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    if dt_measure:
                        dt_measure = dt_measure[0]
                break
            else:
                dt_measure = None

        figure_nrs = {}
        metrics = dt_measure.metrics if dt_measure else {}
        if metrics:
            for metric in metrics:
                # Separate plot for each metric.
                self._create_plot(metric, figsize, operation_group, range1_name, range2_name, figure_nrs)
        else:
            raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")

        parameters = dict(title=self._ui_name, showFullToolbar=True,
                          serverIp=config.SERVER_IP, serverPort=config.MPLH5_SERVER_PORT,
                          figureNumbers=figure_nrs, metrics=metrics, figuresJSON=json.dumps(figure_nrs))

        return self.build_display_result("pse_isocline/view", parameters)
Example #23
    def __init__(self, datatype_group_gid):
        self.datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)

        if self.datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        self.operation_group = dao.get_operationgroup_by_id(
            self.datatype_group.fk_operation_group)
        self.operations = dao.get_operations_in_group(self.operation_group.id)
        self.pse_model_list = self.parse_pse_data_for_display()
        self.all_metrics = dict()
        self._prepare_ranges_data()
Example #24
    def set_operation_and_group_visibility(entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True, then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """
        if not is_operation_group:
            # Ensure that if the operation belongs to a group, the visibility is changed for the entire group
            operation = dao.get_operation_by_gid(entity_gid)
            if operation.fk_operation_group is not None:
                op_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
                entity_gid = op_group.gid
                is_operation_group = True

        dao.set_operation_and_group_visibility(entity_gid, is_visible, is_operation_group)
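
Note: The setter above silently promotes a single-operation request to a group-wide one whenever the operation belongs to a group, so visibility always stays consistent across a group. A compact sketch of that promotion rule, with plain dicts standing in for the entities:

def resolve_visibility_target(operation, is_group_request):
    """Return (gid, is_group): promoted to the group when the operation has one."""
    if not is_group_request and operation.get("group_gid") is not None:
        return operation["group_gid"], True
    return operation["gid"], is_group_request

op_in_group = {"gid": "op-1", "group_gid": "grp-9"}
standalone = {"gid": "op-2", "group_gid": None}
assert resolve_visibility_target(op_in_group, False) == ("grp-9", True)
assert resolve_visibility_target(standalone, False) == ("op-2", False)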
Example #25
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of datatype from storage / H5 file 
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
Example #26
    def prepare_node_data(datatype_group):
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
        operations = dao.get_operations_in_group(operation_group.id)
        node_info_dict = dict()
        for operation_ in operations:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                datatype = datatypes[0]
                node_info_dict[datatype.gid] = dict(operation_id=operation_.id,
                                                    datatype_gid=datatype.gid,
                                                    datatype_type=datatype.type,
                                                    datatype_subject=datatype.subject,
                                                    datatype_invalid=datatype.invalid)
        return node_info_dict
Example #27
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
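
Note: `__import_operation` above treats a failed group lookup (`SQLAlchemyError`) as "not created yet" and builds the DataTypeGroup on the fly. The generic get-or-create skeleton behind it, with a plain dict standing in for the database:

def get_or_create(groups, group_id, factory):
    """Return the stored group, creating it via factory on a missed lookup."""
    try:
        return groups[group_id]        # dao.get_datatypegroup_by_op_group_id(...)
    except KeyError:                   # SQLAlchemyError in the original
        groups[group_id] = factory(group_id)
        return groups[group_id]

groups = {}
created = get_or_create(groups, 7, lambda gid: {"fk_operation_group": gid})
assert get_or_create(groups, 7, lambda gid: None) is created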
Example #28
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = ADAPTERS['Upload']['defaultdatastate']
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
Example #29
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = ADAPTERS['Upload']['defaultdatastate']
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group
Example #30
    def prepare_node_data(datatype_group):
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        operations = dao.get_operations_in_group(operation_group.id)
        node_info_dict = dict()
        for operation_ in operations:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                datatype = datatypes[0]
                node_info_dict[datatype.gid] = dict(
                    operation_id=operation_.id,
                    datatype_gid=datatype.gid,
                    datatype_type=datatype.type,
                    datatype_subject=datatype.subject,
                    datatype_invalid=datatype.invalid)
        return node_info_dict
Example #31
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB """
        return dao.get_operationgroup_by_id(operation_group_id)
Example #32
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We assume there are at most 2 ranges, and that each operation produces exactly one DataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
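
Note: `fake_numbers1`/`fake_numbers2` above map non-numeric range labels onto their positional indices so they can still be placed on a numeric axis (the same trick as the `values_x`/`values_y` fallback at the end). The mapping in isolation:

def label_positions(values):
    """Map each range label to its index, mirroring fake_numbers1/2 above."""
    return {value: index for index, value in enumerate(values)}

assert label_positions(["low", "mid", "high"]) == {"low": 0, "mid": 1, "high": 2}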
Example #33
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We assume there are at most 2 ranges, and that each operation produces exactly one DataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: a list of `DataTypeMeasure` which have been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which have been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)

        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric,
                                         size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.gid,
                            '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
Example #34
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0",
                                                 "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(
                group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    session = SA_SESSIONMAKER()
    session.execute(
        text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""
             ))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # TODO: fix me
        # for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
        #     session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
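
Note: The SQL CASE in both migrations prefixes each legacy status with a digit so statuses sort by progress. The same mapping as a plain dict, for reference:

STATUS_MAP = {
    "FINISHED": "4-FINISHED",
    "STARTED": "3-STARTED",
    "CANCELED": "2-CANCELED",
    "ERROR": "1-ERROR",
}

def migrate_status(status):
    # The WHERE clause restricts the update to the four legacy values,
    # so the CASE's ELSE branch effectively only ever sees 'ERROR'.
    return STATUS_MAP.get(status, "1-ERROR")

assert migrate_status("FINISHED") == "4-FINISHED"
assert sorted(STATUS_MAP.values()) == ["1-ERROR", "2-CANCELED", "3-STARTED", "4-FINISHED"]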
Example #35
    def import_list_of_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, None if is_group else importer_operation_id)

        if is_group and len(ordered_operations) > 0:
            first_op = dao.get_operation_by_id(importer_operation_id)
            vm_path = h5.determine_filepath(first_op.view_model_gid,
                                            os.path.dirname(import_path))
            os.remove(vm_path)

            ordered_operations[0].operation.id = importer_operation_id

        for operation_data in ordered_operations:
            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.storage_interface.get_project_folder(
                    project.name, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                operation_data.operation.create_date = datetime.now()
                operation_data.operation.start_date = datetime.now()
                operation_data.operation.completion_date = datetime.now()

                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    imported_operations.append(operation_entity)
                    new_op_folder = self.storage_interface.get_project_folder(
                        project.name, str(operation_entity.id))
                    view_model_disk_size = 0
                    for h5_file in operation_data.all_view_model_files:
                        view_model_disk_size += StorageInterface.compute_size_on_disk(
                            h5_file)
                        shutil.move(h5_file, new_op_folder)
                    operation_entity.view_model_disk_size = view_model_disk_size
                    dao.store_entity(operation_entity)
                except MissingReferenceException as excep:
                    self.storage_interface.remove_operation_data(
                        project.name, operation_entity.id)
                    operation_entity.fk_operation_group = None
                    dao.store_entity(operation_entity)
                    dao.remove_entity(DataTypeGroup, dt_group.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

            # We want importer_operation_id to be kept just for the first operation (the first iteration)
            if is_group:
                importer_operation_id = None

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
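
A minimal usage sketch for the method above; the import_service and project objects and the folder path are illustrative assumptions:

    imported_ops, dts_found, dts_stored = import_service.import_list_of_operations(
        project, "/tmp/exported_project")
    print("Imported %d operations; stored %d of %d datatypes."
          % (len(imported_ops), dts_stored, dts_found))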
Example #36
    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
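            # First free tag slot wins: fall back to user_tag_2 when user_tag_1 is taken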
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size-on disk, in case file-storage is used
            if hasattr(res, 'storage_path') and hasattr(
                    res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path,
                                               res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', len(results_to_store)
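
The method returns a human-readable message plus the number of stored results; a sketch of a call site, with the adapter instance and launch results assumed:

    message, nr_stored = adapter._capture_operation_results(launch_results)
    print(message, "-", nr_stored, "datatype(s) stored")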
Example #37
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of a datatype from a stored H5 file.
        :returns: a DatatypeIndex, or a BurstConfiguration for burst H5 files
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)

            already_existing_datatype = h5.load_entity_by_gid(datatype.gid)
            if datatype_group is not None and already_existing_datatype is not None:
                raise DatatypeGroupImportException(
                    "The datatype group that you are trying to import"
                    " already exists!")
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \
                    datatype_index.fk_source_gid is not None:
                ts = h5.load_entity_by_gid(datatype_index.fk_source_gid)

                if ts is None:
                    op = dao.get_operations_in_group(
                        datatype_group.fk_operation_group,
                        only_first_operation=True)
                    op.fk_operation_group = None
                    dao.store_entity(op)
                    dao.remove_entity(OperationGroup,
                                      datatype_group.fk_operation_group)
                    dao.remove_entity(DataTypeGroup, datatype_group.id)
                    raise DatatypeGroupImportException(
                        "Please import the time series group before importing the"
                        " datatype measure group!")

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = StorageInterface.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
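
A sketch of importing a single H5 file through the method above; the import_service, operation and project objects and the file path are illustrative assumptions:

    datatype_index = import_service.load_datatype_from_file(
        "/tmp/time_series.h5", operation.id, current_project_id=project.id)
    dao.store_entity(datatype_index)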
Example #38
    def import_project_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        Scans the provided folder and identifies all operations that need to be imported.
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, importer_operation_id)

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    # TODO: TVB-2849 review these flags and simplify the condition
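                    # Keep the operation when it stored datatypes, when it is a
                    # stand-alone user import, or when it was explicitly requested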
                    if stored_dts_count > 0 or (
                            not operation_data.is_self_generated and
                            not is_group) or importer_operation_id is not None:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        view_model_disk_size = 0
                        for h5_file in operation_data.all_view_model_files:
                            view_model_disk_size += FilesHelper.compute_size_on_disk(
                                h5_file)
                            shutil.move(h5_file, new_op_folder)
                        operation_entity.view_model_disk_size = view_model_disk_size
                        dao.store_entity(operation_entity)
                    else:
                        # In case all Dts under the current operation were Links and the ViewModel is dummy,
                        # don't keep the Operation empty in DB
                        dao.remove_entity(Operation, operation_entity.id)
                        self.files_helper.remove_operation_data(
                            project.name, operation_entity.id)
                except MissingReferenceException as excep:
                    dao.remove_entity(Operation, operation_entity.id)
                    self.files_helper.remove_operation_data(
                        project.name, operation_entity.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
Example #39
    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """Loads an OperationGroup from DB."""
        return dao.get_operationgroup_by_id(operation_group_id)
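
A trivial call sketch; the project_service instance name is an assumption, and the name attribute is the one filled by fill_operationgroup_name in the examples above:

    op_group = project_service.get_operation_group_by_id(42)
    print(op_group.name)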