Example #1
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index, sim_operation.id, 'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category, time_series_index.fk_parent_burst)
        ga.visible = False

        view_model = get_class_by_name("{}.{}".format(MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex, sim_operation.fk_launched_by, sim_operation.fk_launched_in,
                                     metric_algo.id, user_group=ga.operation_tag, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(DataTypeGroup, metric_operation_group_id,
                                                        'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project, view_model)
        return metric_operation
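
For orientation, a minimal sketch of how the method above would typically be driven: once a simulation Operation has finished and its TimeSeries index is stored, the returned (hidden) metric Operation is handed to whatever backend launches operations. The hook name and the `backend_client` object are assumptions for illustration, not part of the example.

def _schedule_metrics_after_simulation(self, sim_operation, backend_client):
    # Hypothetical follow-up step (names assumed): build the hidden metrics
    # Operation for the finished simulation and submit it for execution.
    metric_operation = self._prepare_metric_operation(sim_operation)
    backend_client.launch(metric_operation)
    return metric_operation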
Example #2
    def prepare_operation(self,
                          user_id,
                          project_id,
                          algorithm,
                          view_model_gid,
                          op_group=None,
                          ranges=None,
                          visible=True):

        op_group_id = None
        if op_group:
            op_group_id = op_group.id
        if isinstance(view_model_gid, uuid.UUID):
            view_model_gid = view_model_gid.hex

        operation = Operation(view_model_gid,
                              user_id,
                              project_id,
                              algorithm.id,
                              op_group_id=op_group_id,
                              range_values=ranges)
        self.logger.debug("Saving Operation(userId=" + str(user_id) +
                          ",projectId=" + str(project_id) + ",algorithmId=" +
                          str(algorithm.id) + ", ops_group= " +
                          str(op_group_id) + ")")

        operation.visible = visible
        operation = dao.store_entity(operation)
        return operation
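
A minimal usage sketch for the variant above, assuming `service` is the object exposing this `prepare_operation` and that the algorithm and view model were loaded and stored elsewhere; the caller's name is hypothetical.

def launch_single_analysis(service, user_id, project_id, algorithm, view_model):
    # Hypothetical caller (names assumed): persist one visible Operation for an
    # already-stored view model. A uuid.UUID GID is accepted and converted to hex
    # inside prepare_operation; no operation group is needed for a single run.
    return service.prepare_operation(user_id, project_id, algorithm, view_model.gid,
                                     op_group=None, ranges=None, visible=True)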
Example #3
    @staticmethod
    def prepare_metrics_operation(operation):
        # TODO reuse from OperationService and do not duplicate logic here
        parent_burst = dao.get_generic_entity(BurstConfiguration, operation.fk_operation_group, 'fk_operation_group')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = operation.range_values
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)

        metric_operation = Operation(None, operation.fk_launched_by, operation.fk_launched_in, metric_algo.id,
                                     status=STATUS_FINISHED, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)
        op_dir = StorageInterface().get_project_folder(operation.project.name, str(metric_operation.id))
        return op_dir, metric_operation
Example #4
    def prepare_operations(self, user_id, project, algorithm, category,
                           visible=True, existing_dt_group=None, view_model=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. If a range of
        values is given, create an operation group and one operation for each possible
        instance from the range.
        """
        operations = []

        available_args, group = self._prepare_group(project.id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        ga = self._prepare_metadata(category, kwargs, group)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = Operation(user_id, project.id, algorithm.id, json.dumps({'gid': view_model.gid.hex}),
                                  op_group_id=group_id, user_group=ga.operation_tag, range_values=range_values)
            operation.visible = visible
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            if existing_dt_group is None:
                datatype_group = DataTypeGroup(group, operation_id=operations[0].id, state=category.defaultdatastate)
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        for operation in operations:
            self._store_view_model(operation, project, view_model)

        return operations, group
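
The docstring above says that a range of values expands into one operation per combination, all tied to an operation group. The snippet below only illustrates that cartesian expansion in plain Python; `build_range_combinations` and the parameter names are illustrative and not part of the TVB API.

import itertools
import json

def build_range_combinations(named_ranges):
    # Illustrative helper (not TVB code): expand named ranges into the
    # (args, range_values_json) pairs that prepare_operations iterates over.
    names = list(named_ranges)
    pairs = []
    for values in itertools.product(*(named_ranges[name] for name in names)):
        range_values = dict(zip(names, values))
        pairs.append((range_values, json.dumps(range_values)))
    return pairs

# A 2 x 3 parameter grid yields 6 operations under one OperationGroup.
pairs = build_range_combinations({"conduction_speed": [1.5, 3.0],
                                  "coupling_a": [0.006, 0.008, 0.010]})
assert len(pairs) == 6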
Example #5
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                                  MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        view_model = get_class_by_name("{}.{}".format(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        self._store_view_model(stored_metric_operation, sim_operation.project,
                               view_model)
        return stored_metric_operation
Example #6
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)
        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        view_model = TimeseriesMetricsAdapterModel()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        OperationService._store_view_model(stored_metric_operation,
                                           sim_operation.project, view_model)
        return stored_metric_operation
Example #7
    def __copy_linked_datatype_before_delete(self, op, datatype, project, fk_to_project):
        new_op = Operation(op.view_model_gid,
                           dao.get_system_user().id,
                           fk_to_project,
                           datatype.parent_operation.fk_from_algo,
                           datatype.parent_operation.status,
                           datatype.parent_operation.start_date,
                           datatype.parent_operation.completion_date,
                           datatype.parent_operation.fk_operation_group,
                           datatype.parent_operation.additional_info,
                           datatype.parent_operation.user_group,
                           datatype.parent_operation.range_values)
        new_op.visible = datatype.parent_operation.visible
        new_op = dao.store_entity(new_op)
        to_project = self.find_project(fk_to_project)
        to_project_path = self.storage_interface.get_project_folder(to_project.name)

        full_path = h5.path_for_stored_index(datatype)
        old_folder = self.storage_interface.get_project_folder(project.name, str(op.id))
        file_paths = h5.gather_references_of_view_model(op.view_model_gid, old_folder, only_view_models=True)[0]
        file_paths.append(full_path)

        # The BurstConfiguration H5 file has to be moved only when handling the time series whose
        # operation folder contains the file
        if datatype.is_ts and datatype.fk_parent_burst is not None:
            bc_path = h5.path_for(datatype.parent_operation.id, BurstConfigurationH5, datatype.fk_parent_burst,
                                  project.name)
            if os.path.exists(bc_path):
                file_paths.append(bc_path)

                bc = dao.get_burst_for_operation_id(op.id)
                bc.fk_simulation = new_op.id
                dao.store_entity(bc)

        # Move all files to the new operation folder
        self.storage_interface.move_datatype_with_sync(to_project, to_project_path, new_op.id, file_paths)

        datatype.fk_from_operation = new_op.id
        datatype.parent_operation = new_op
        dao.store_entity(datatype)

        return new_op
Example #8
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)

        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]
        ts_metrics_adapter_form = TimeseriesMetricsAdapterForm()
        ts_metrics_adapter_form.fill_from_trait(
            BaseTimeseriesMetricAlgorithm())
        ts_metrics_adapter_form.time_series.data = time_series_index.gid
        op_params = json.dumps(ts_metrics_adapter_form.get_dict())
        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, op_params)
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration2, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.metric_operation_group_id
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     op_params,
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id

        return operation
Example #9
    def prepare_operation(self,
                          user_id,
                          project_id,
                          algorithm_id,
                          category,
                          view_model_gid,
                          op_group,
                          metadata,
                          ranges=None,
                          visible=True):
        operation_parameters = json.dumps({'gid': view_model_gid})
        metadata, user_group = self._prepare_metadata(metadata, category,
                                                      op_group, {})
        meta_str = json.dumps(metadata)

        op_group_id = None
        if op_group:
            op_group_id = op_group.id

        operation = Operation(user_id,
                              project_id,
                              algorithm_id,
                              operation_parameters,
                              op_group_id=op_group_id,
                              meta=meta_str,
                              range_values=ranges)

        self.logger.debug("Saving Operation(userId=" + str(user_id) +
                          ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm_id) +
                          ", ops_group= " + str(op_group_id) + ")")

        visible_operation = visible and category.display is False
        operation = dao.store_entity(operation)
        operation.visible = visible_operation

        return operation
Example #10
    def _retrieve_operations_in_order(self,
                                      project,
                                      import_path,
                                      importer_operation_id=None):
        # type: (Project, str, int) -> list[Operation2ImportData]
        retrieved_operations = []

        for root, _, files in os.walk(import_path):
            if OPERATION_XML in files:
                # Previous Operation format for uploading previous versions of projects
                operation_file_path = os.path.join(root, OPERATION_XML)
                operation, operation_xml_parameters, _ = self.build_operation_from_file(
                    project, operation_file_path)
                operation.import_file = operation_file_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                retrieved_operations.append(
                    Operation2ImportData(
                        operation,
                        root,
                        info_from_xml=operation_xml_parameters))

            else:
                # We strive for the new format with ViewModelH5
                main_view_model = None
                dt_paths = []
                all_view_model_files = []
                for file in files:
                    if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                        h5_file = os.path.join(root, file)
                        try:
                            h5_class = H5File.h5_class_from_file(h5_file)
                            if h5_class is ViewModelH5:
                                all_view_model_files.append(h5_file)
                                if not main_view_model:
                                    view_model = h5.load_view_model_from_file(h5_file)
                                    if type(view_model) in self.view_model2adapter.keys():
                                        main_view_model = view_model
                            else:
                                file_update_manager = FilesUpdateManager()
                                file_update_manager.upgrade_file(h5_file)
                                dt_paths.append(h5_file)
                        except Exception:
                            self.logger.warning(
                                "Unreadable H5 file will be ignored: %s" %
                                h5_file)

                if main_view_model is not None:
                    alg = self.view_model2adapter[type(main_view_model)]
                    op_group_id = None
                    if main_view_model.operation_group_gid:
                        op_group = dao.get_operationgroup_by_gid(
                            main_view_model.operation_group_gid.hex)
                        if not op_group:
                            op_group = OperationGroup(
                                project.id,
                                ranges=json.loads(main_view_model.ranges),
                                gid=main_view_model.operation_group_gid.hex)
                            op_group = dao.store_entity(op_group)
                        op_group_id = op_group.id
                    operation = Operation(
                        main_view_model.gid.hex,
                        project.fk_admin,
                        project.id,
                        alg.id,
                        status=STATUS_FINISHED,
                        user_group=main_view_model.generic_attributes.operation_tag,
                        start_date=datetime.now(),
                        completion_date=datetime.now(),
                        op_group_id=op_group_id,
                        range_values=main_view_model.range_values)
                    operation.create_date = main_view_model.create_date
                    operation.visible = main_view_model.generic_attributes.visible
                    self.logger.debug(
                        "Found main ViewModel to create operation for it: " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, main_view_model,
                                             dt_paths, all_view_model_files))

                elif len(dt_paths) > 0:
                    alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                      TVB_IMPORTER_CLASS)
                    default_adapter = ABCAdapter.build_adapter(alg)
                    view_model = default_adapter.get_view_model_class()()
                    view_model.data_file = dt_paths[0]
                    vm_path = h5.store_view_model(view_model, root)
                    all_view_model_files.append(vm_path)
                    operation = Operation(view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    self.logger.debug(
                        "Found no ViewModel in folder, so we default to " +
                        str(operation))

                    if importer_operation_id:
                        operation.id = importer_operation_id

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, view_model,
                                             dt_paths, all_view_model_files,
                                             True))

        return sorted(retrieved_operations,
                      key=lambda op_data: op_data.order_field)
Example #11
    def prepare_operations_for_workflowsteps(self, workflow_step_list,
                                             workflows, user_id, burst_id,
                                             project_id, group,
                                             sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        In total, workflows x workflow_step_list Operations will be generated.
        For every step in workflow_step_list one OperationGroup and one DataTypeGroup will be created 
        (in case of PSE).
        """

        for step in workflow_step_list:
            operation_group = None
            if (group is not None) and not isinstance(step, WorkflowStepView):
                operation_group = OperationGroup(project_id=project_id,
                                                 ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, WorkflowStepView):
                    ## For visualization steps, do not create operations, as those are not really needed.
                    metadata, user_group = self._prepare_metadata(
                        metadata, algo_category, operation_group, op_params)
                    operation = Operation(user_id,
                                          project_id,
                                          step.fk_algorithm,
                                          json.dumps(
                                              op_params,
                                              cls=MapAsJson.MapAsJsonEncoder),
                                          meta=json.dumps(metadata),
                                          op_group_id=group_id,
                                          range_values=range_values,
                                          user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            if operation_group is not None and operation is not None:
                datatype_group = DataTypeGroup(
                    operation_group,
                    operation_id=operation.id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
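
The docstring above states that workflows x workflow_step_list Operations are generated, with one OperationGroup and one DataTypeGroup per non-visualization step when a PSE group is present. A small, purely illustrative count under those assumptions (not TVB code):

def expected_entity_counts(num_workflows, num_analysis_steps):
    # Illustrative only: each (analysis step, workflow) pair yields one Operation;
    # each analysis step gets one OperationGroup and one DataTypeGroup in the PSE
    # case; visualization steps create no Operation at all.
    return {"operations": num_workflows * num_analysis_steps,
            "operation_groups": num_analysis_steps,
            "datatype_groups": num_analysis_steps}

assert expected_entity_counts(3, 2) == {"operations": 6,
                                        "operation_groups": 2,
                                        "datatype_groups": 2}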
Example #12
    def prepare_operations(self,
                           user_id,
                           project_id,
                           algorithm,
                           category,
                           metadata,
                           visible=True,
                           existing_dt_group=None,
                           **kwargs):
        """
        Do all the necessary preparations for storing an operation. If a range of
        values is given, create an operation group and one operation for each possible
        instance from the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        operations = []

        available_args, group = self._prepare_group(project_id,
                                                    existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException(
                "Too big range specified. You should limit the"
                " resulting operations to %d" %
                TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." %
                              len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        metadata, user_group = self._prepare_metadata(metadata, category,
                                                      group, kwargs)

        self.logger.debug("Saving Operation(userId=" + str(user_id) +
                          ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm.id) +
                          ", ops_group= " + str(group_id) + ")")

        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = Operation(user_id,
                                  project_id,
                                  algorithm.id,
                                  json.dumps(one_set_of_args),
                                  meta_str,
                                  op_group_id=group_id,
                                  user_group=user_group,
                                  range_values=range_values)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = None
            if DataTypeMetaData.KEY_BURST in metadata:
                burst_id = metadata[DataTypeMetaData.KEY_BURST]
            if existing_dt_group is None:
                datatype_group = DataTypeGroup(
                    group,
                    operation_id=operations[0].id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group