Example n. 1
0
    def build(algorithm=None,
              test_user=None,
              test_project=None,
              operation_status=STATUS_FINISHED,
              parameters="test params",
              range_values=None):
        """
        Create and persist a test Operation.

        :param algorithm: Algorithm entity to attach; the SimulatorAdapter is
            looked up when None.
        :param test_user: owning user; a fresh one is built when None.
        :param test_project: target project; built from test_user when None.
        :param operation_status: status stored on the operation.
        :param parameters: raw parameters string persisted with the operation.
        :param range_values: optional range values for PSE operations.
        :return: the Operation re-loaded from the DB so lazy attributes resolve.
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module(
                'tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        entity = Operation(test_user.id, test_project.id, algorithm.id,
                           parameters, status=operation_status,
                           range_values=range_values)
        dao.store_entity(entity)
        # Re-fetch so the returned instance has its lazy attributes populated.
        return dao.get_operation_by_id(entity.id)
Example n. 2
0
    def _prepare_operation(self,
                           project_id,
                           user_id,
                           simulator_id,
                           simulator_gid,
                           algo_category,
                           op_group,
                           metadata,
                           ranges=None):
        """
        Build and persist an Operation that launches a simulator.

        :param project_id: id of the project the operation runs in.
        :param user_id: id of the launching user.
        :param simulator_id: id of the simulator algorithm.
        :param simulator_gid: gid of the stored simulator ViewModel.
        :param algo_category: algorithm category used while preparing metadata.
        :param op_group: OperationGroup for ranged launches, or None.
        :param metadata: raw metadata dict, normalized via _prepare_metadata.
        :param ranges: optional serialized range values.
        :return: the persisted Operation entity.
        """
        params_str = json.dumps({'gid': simulator_gid.hex})
        metadata, user_group = self.operation_service._prepare_metadata(
            metadata, algo_category, op_group, {})

        group_id = op_group.id if op_group else None
        operation = Operation(user_id,
                              project_id,
                              simulator_id,
                              params_str,
                              op_group_id=group_id,
                              meta=json.dumps(metadata),
                              range_values=ranges)

        # Lazy %-style args produce the same message as the old concatenation.
        self.logger.info(
            "Saving Operation(userId=%s, projectId=%s,%s, algorithmId=%s, ops_group= %s, params=%s)",
            user_id, project_id, metadata, simulator_id, group_id, params_str)

        # TODO: prepare portlets/handle operation groups/no workflows
        return dao.store_entity(operation)
    def test_adapter_huge_memory_requirement(self, test_adapter_factory):
        """
        Test that a NoMemoryAvailableException is raised in case the adapter
        cannot launch due to lack of memory.
        """
        # Prepare adapter
        test_adapter_factory(adapter_class=TestAdapterHugeMemoryRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHugeMemoryRequired")

        # Simulate receiving POST data
        form = TestAdapterHugeMemoryRequiredForm()

        view_model = form.get_view_model()()
        view_model.test = 5

        # Prepare operation for launch
        # NOTE(review): this Operation signature leads with the view-model gid;
        # other snippets in this file use an older (user, project, ...) order.
        operation = Operation(view_model.gid.hex,
                              self.test_user.id,
                              self.test_project.id,
                              adapter.stored_adapter.id,
                              status=STATUS_STARTED)
        operation = dao.store_entity(operation)

        # Store ViewModel in H5
        parent_folder = FilesHelper().get_project_folder(
            self.test_project, str(operation.id))
        h5.store_view_model(view_model, parent_folder)

        # Launch operation
        with pytest.raises(NoMemoryAvailableException):
            OperationService().initiate_prelaunch(operation, adapter)
Example n. 4
0
    def build(algorithm=None,
              test_user=None,
              test_project=None,
              operation_status=STATUS_FINISHED,
              parameters="test params",
              meta=None):
        """
        Create and persist a test Operation carrying serialized metadata.

        :param algorithm: Algorithm entity; defaults to the SimulatorAdapter.
        :param test_user: owning user; a fresh one is built when None.
        :param test_project: target project; built from test_user when None.
        :param operation_status: status stored on the operation.
        :param parameters: raw parameters string persisted with the operation.
        :param meta: metadata dict; defaults to a subject/state pair when None.
        :return: the Operation re-loaded from the DB so lazy attributes resolve.
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module(
                'tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)
        if meta is None:
            meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                    DataTypeMetaData.KEY_STATE: "RAW_DATA"}

        entity = Operation(test_user.id, test_project.id, algorithm.id,
                           parameters, meta=json.dumps(meta),
                           status=operation_status)
        dao.store_entity(entity)
        # Re-fetch so the returned instance has its lazy attributes populated.
        return dao.get_operation_by_id(entity.id)
Example n. 5
0
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        """
        Clone a finished simulation operation into a hidden metrics Operation.

        Looks up the TimeSeries index produced by *sim_operation*, builds the
        measure-metrics ViewModel pointing at it, and persists a new Operation
        inside the burst's metric operation group, re-using the simulation's
        range values.

        :param sim_operation: finished simulation Operation to derive metrics from.
        :return: the persisted metrics Operation.
        """
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                                  MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        # The TimeSeries index produced by the simulation operation.
        time_series_index = dao.get_generic_entity(datatype_index,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category,
                                   time_series_index.fk_parent_burst)
        ga.visible = False

        # Instantiate the measure-metrics ViewModel class by its dotted name.
        view_model = get_class_by_name("{}.{}".format(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(
            parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex,
                                     sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     user_group=ga.operation_tag,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        # First metric operation in the group: record it as the group's origin.
        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project,
                              view_model)
        return metric_operation
Example n. 6
0
    def _remove_project_node_files(self,
                                   project_id,
                                   gid,
                                   skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.

        :param project_id: id of the project the datatype is removed from.
        :param gid: global id of the datatype to remove.
        :param skip_validation: forwarded to the type-specific remover.
        :raises StructureException: when the underlying file operations fail.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Owned here but linked elsewhere: re-home the datatype by
                    # cloning its parent operation into the first linked project.
                    # Create a clone of the operation
                    new_op = Operation(
                        dao.get_system_user().id, links[0].fk_to_project,
                        datatype.parent_operation.fk_from_algo,
                        datatype.parent_operation.parameters,
                        datatype.parent_operation.meta_data,
                        datatype.parent_operation.status,
                        datatype.parent_operation.start_date,
                        datatype.parent_operation.completion_date,
                        datatype.parent_operation.fk_operation_group,
                        datatype.parent_operation.additional_info,
                        datatype.parent_operation.user_group,
                        datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    # Reload so metadata is written from a fully loaded entity.
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(
                        new_op_loaded)
                    full_path = h5.path_for_stored_index(datatype)
                    # Move the H5 file under the clone's folder, then re-parent
                    # the index entity before dropping the consumed link.
                    self.structure_helper.move_datatype(
                        datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                # No links: delete the datatype and its backing file for real.
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException(
                "Remove operation failed for unknown reasons.Please contact system administrator."
            )
    def prepare_operations(self, user_id, project, algorithm, category,
                           visible=True, existing_dt_group=None, view_model=None, **kwargs):
        """
        Do all the necessary preparations for storing an operation. If it's the case of a
        range of values create an operation group and multiple operations for each possible
        instance from the range.

        :param user_id: id of the launching user.
        :param project: Project entity the operations belong to.
        :param algorithm: Algorithm entity to execute.
        :param category: algorithm category, used for metadata preparation.
        :param visible: whether the resulting operations are shown in the UI.
        :param existing_dt_group: reuse this DataTypeGroup instead of creating one.
        :param view_model: ViewModel whose gid is stored as the operation parameters.
        :param kwargs: raw launch arguments, possibly containing range specifications.
        :return: tuple (list of persisted Operations, OperationGroup or None).
        :raises LaunchException: when the range expands past MAX_RANGE_NUMBER.
        """
        operations = []

        # Expand range arguments into one argument-set (and range values) per operation.
        available_args, group = self._prepare_group(project.id, existing_dt_group, kwargs)
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException("Too big range specified. You should limit the"
                                  " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
        else:
            self.logger.debug("Launching a range with %d operations..." % len(available_args))
        group_id = None
        if group is not None:
            group_id = group.id
        ga = self._prepare_metadata(category, kwargs, group)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

        # One Operation per expanded argument set; all share the same group and tag.
        for (one_set_of_args, range_vals) in available_args:
            range_values = json.dumps(range_vals) if range_vals else None
            operation = Operation(user_id, project.id, algorithm.id, json.dumps({'gid': view_model.gid.hex}),
                                  op_group_id=group_id, user_group=ga.operation_tag, range_values=range_values)
            operation.visible = visible
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            if existing_dt_group is None:
                datatype_group = DataTypeGroup(group, operation_id=operations[0].id, state=category.defaultdatastate)
                dao.store_entity(datatype_group)
            else:
                # Reset count
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        for operation in operations:
            self._store_view_model(operation, project, view_model)

        return operations, group
    def prepare_operation(self, user_id, project_id, algorithm, view_model_gid,
                          op_group=None, ranges=None, visible=True):
        """
        Create and persist an Operation referencing a ViewModel by its gid.

        :param user_id: id of the launching user.
        :param project_id: id of the target project.
        :param algorithm: Algorithm entity to execute.
        :param view_model_gid: gid of the ViewModel (uuid.UUID or hex string).
        :param op_group: OperationGroup for ranged launches, or None.
        :param ranges: optional serialized range values.
        :param visible: visibility flag stored on the operation.
        :return: the persisted Operation entity.
        """
        group_id = op_group.id if op_group else None
        gid_value = view_model_gid.hex if isinstance(view_model_gid, uuid.UUID) else view_model_gid

        new_operation = Operation(user_id, project_id, algorithm.id,
                                  json.dumps({'gid': gid_value}),
                                  op_group_id=group_id, range_values=ranges)
        self.logger.debug("Saving Operation(userId=%s,projectId=%s,algorithmId=%s, ops_group= %s)",
                          user_id, project_id, algorithm.id, group_id)

        new_operation.visible = visible
        return dao.store_entity(new_operation)
Example n. 9
0
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        """
        Derive and persist a hidden metrics Operation from a simulation operation.

        :param sim_operation: finished simulation Operation to derive metrics from.
        :return: the stored metrics Operation.
        """
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                                  MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        # The TimeSeries index produced by the simulation operation.
        time_series_index = dao.get_generic_entity(datatype_index,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        # Instantiate the measure-metrics ViewModel class by its dotted name.
        view_model = get_class_by_name("{}.{}".format(
            MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            # NOTE(review): unlike similar code elsewhere, the modified group is
            # not passed to dao.store_entity here — presumably the ORM session
            # flushes it; confirm this change actually persists.
            metrics_datatype_group.fk_from_operation = metric_operation.id

        self._store_view_model(stored_metric_operation, sim_operation.project,
                               view_model)
        return stored_metric_operation
Example n. 10
0
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        """
        Derive and persist a hidden metrics Operation from a simulation operation.

        :param sim_operation: finished simulation Operation to derive metrics from.
        :return: the stored metrics Operation.
        """
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)
        # The TimeSeries index produced by the simulation operation.
        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]

        view_model = TimeseriesMetricsAdapterModel()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(choices.values())

        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, {})
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     json.dumps({'gid': view_model.gid.hex}),
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        stored_metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            # NOTE(review): the modified group is not explicitly re-stored here;
            # presumably the ORM session flushes it — confirm.
            metrics_datatype_group.fk_from_operation = metric_operation.id

        OperationService._store_view_model(stored_metric_operation,
                                           sim_operation.project, view_model)
        return stored_metric_operation
Example n. 11
0
    def __copy_linked_datatype_before_delete(self, op, datatype, project, fk_to_project):
        """
        Re-home a linked datatype into another project before deleting it here.

        Clones *datatype*'s parent operation into project *fk_to_project*, moves
        the datatype H5 plus its ViewModel files (and the BurstConfiguration H5
        for a time series) into the clone's folder, and re-parents the index.

        :param op: Operation currently owning the ViewModel files.
        :param datatype: datatype index being migrated.
        :param project: project the datatype is being removed from.
        :param fk_to_project: id of the destination (linked) project.
        :return: the newly stored clone Operation.
        """
        new_op = Operation(op.view_model_gid,
                           dao.get_system_user().id,
                           fk_to_project,
                           datatype.parent_operation.fk_from_algo,
                           datatype.parent_operation.status,
                           datatype.parent_operation.start_date,
                           datatype.parent_operation.completion_date,
                           datatype.parent_operation.fk_operation_group,
                           datatype.parent_operation.additional_info,
                           datatype.parent_operation.user_group,
                           datatype.parent_operation.range_values)
        new_op.visible = datatype.parent_operation.visible
        new_op = dao.store_entity(new_op)
        to_project = self.find_project(fk_to_project)
        to_project_path = self.storage_interface.get_project_folder(to_project.name)

        # Collect every file that must travel with the datatype.
        full_path = h5.path_for_stored_index(datatype)
        old_folder = self.storage_interface.get_project_folder(project.name, str(op.id))
        file_paths = h5.gather_references_of_view_model(op.view_model_gid, old_folder, only_view_models=True)[0]
        file_paths.append(full_path)

        # The BurstConfiguration h5 file has to be moved only when we handle the time series which has the operation
        # folder containing the file
        if datatype.is_ts and datatype.fk_parent_burst is not None:
            bc_path = h5.path_for(datatype.parent_operation.id, BurstConfigurationH5, datatype.fk_parent_burst,
                                  project.name)
            if os.path.exists(bc_path):
                file_paths.append(bc_path)

                # Point the burst at the cloned simulation operation.
                bc = dao.get_burst_for_operation_id(op.id)
                bc.fk_simulation = new_op.id
                dao.store_entity(bc)

        # Move all files to the new operation folder
        self.storage_interface.move_datatype_with_sync(to_project, to_project_path, new_op.id, file_paths)

        datatype.fk_from_operation = new_op.id
        datatype.parent_operation = new_op
        dao.store_entity(datatype)

        return new_op
Example n. 12
0
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        """
        Derive and persist a hidden metrics Operation from a simulation operation.

        :param sim_operation: finished simulation Operation to derive metrics from.
        :return: the stored metrics Operation.
        """
        metric_algo = dao.get_algorithm_by_module(
            TimeseriesMetricsAdapter.__module__,
            TimeseriesMetricsAdapter.__name__)

        # The TimeSeries index produced by the simulation operation.
        time_series_index = dao.get_generic_entity(TimeSeriesIndex,
                                                   sim_operation.id,
                                                   'fk_from_operation')[0]
        # Build the adapter form to serialize the launch parameters.
        ts_metrics_adapter_form = TimeseriesMetricsAdapterForm()
        ts_metrics_adapter_form.fill_from_trait(
            BaseTimeseriesMetricAlgorithm())
        ts_metrics_adapter_form.time_series.data = time_series_index.gid
        op_params = json.dumps(ts_metrics_adapter_form.get_dict())
        range_values = sim_operation.range_values
        metadata = {
            DataTypeMetaData.KEY_BURST: time_series_index.fk_parent_burst
        }
        metadata, user_group = self._prepare_metadata(
            metadata, metric_algo.algorithm_category, None, op_params)
        meta_str = json.dumps(metadata)

        parent_burst = dao.get_generic_entity(
            BurstConfiguration2, time_series_index.fk_parent_burst, 'id')[0]
        metric_operation_group_id = parent_burst.metric_operation_group_id
        metric_operation = Operation(sim_operation.fk_launched_by,
                                     sim_operation.fk_launched_in,
                                     metric_algo.id,
                                     op_params,
                                     meta_str,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(
            DataTypeGroup, metric_operation_group_id, 'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            # NOTE(review): the modified group is not explicitly re-stored here;
            # presumably the ORM session flushes it — confirm.
            metrics_datatype_group.fk_from_operation = metric_operation.id

        return operation
Example n. 13
0
    def build(test_user=None,
              test_project=None,
              is_simulation=False,
              store_vm=False,
              operation_status=STATUS_FINISHED,
              range_values=None,
              conn_gid=None):
        """
        Create persisted operation with a ViewModel stored

        :param test_user: owning user; a fresh one is built when None.
        :param test_project: target project; built from test_user when None.
        :param is_simulation: choose the simulator algorithm instead of the importer.
        :param store_vm: also build the adapter's ViewModel and store it as H5.
        :param operation_status: status stored on the operation.
        :param range_values: optional range values for PSE operations.
        :param conn_gid: connectivity gid to reference; a new one is built when None.
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        # Fallback gid when no ViewModel is stored.
        vm_gid = uuid.uuid4()
        view_model = None

        if is_simulation:
            algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE,
                                                    SIMULATOR_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.connectivity = connectivity_factory(
                    4).gid if conn_gid is None else conn_gid
                vm_gid = view_model.gid

        else:
            algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                    TVB_IMPORTER_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.data_file = "."
                vm_gid = view_model.gid

        operation = Operation(vm_gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status,
                              range_values=range_values)
        dao.store_entity(operation)

        if store_vm:
            op_folder = FilesHelper().get_project_folder(
                test_project, str(operation.id))
            h5.store_view_model(view_model, op_folder)

        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
Example n. 14
0
    def prepare_metrics_operation(operation):
        """
        Build and persist a finished, hidden metrics Operation mirroring *operation*.

        :param operation: simulation Operation whose burst/group/range are reused.
        :return: tuple (operation folder path, persisted metrics Operation).
        """
        # TODO reuse from OperationService and do not duplicate logic here
        algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                           MEASURE_METRICS_CLASS)
        burst = dao.get_generic_entity(BurstConfiguration,
                                       operation.fk_operation_group,
                                       'fk_operation_group')[0]
        group_id = burst.fk_metric_operation_group

        metrics_op = Operation(None,
                               operation.fk_launched_by,
                               operation.fk_launched_in,
                               algo.id,
                               status=STATUS_FINISHED,
                               op_group_id=group_id,
                               range_values=operation.range_values)
        metrics_op.visible = False
        metrics_op = dao.store_entity(metrics_op)

        folder = StorageInterface().get_project_folder(operation.project.name,
                                                       str(metrics_op.id))
        return folder, metrics_op
Example n. 15
0
    def prepare_operation(self,
                          user_id,
                          project_id,
                          algorithm_id,
                          category,
                          view_model_gid,
                          op_group,
                          metadata,
                          ranges=None,
                          visible=True):
        """
        Build and persist an Operation referencing a ViewModel by its gid.

        :param user_id: id of the launching user.
        :param project_id: id of the target project.
        :param algorithm_id: id of the algorithm to execute.
        :param category: algorithm category, used for metadata and visibility.
        :param view_model_gid: gid of the stored ViewModel.
        :param op_group: OperationGroup for ranged launches, or None.
        :param metadata: raw metadata dict, normalized via _prepare_metadata.
        :param ranges: optional serialized range values.
        :param visible: requested visibility; display-category ops are hidden.
        :return: the persisted Operation entity.
        """
        operation_parameters = json.dumps({'gid': view_model_gid})
        metadata, user_group = self._prepare_metadata(metadata, category,
                                                      op_group, {})
        meta_str = json.dumps(metadata)

        op_group_id = None
        if op_group:
            op_group_id = op_group.id

        operation = Operation(user_id,
                              project_id,
                              algorithm_id,
                              operation_parameters,
                              op_group_id=op_group_id,
                              meta=meta_str,
                              range_values=ranges)

        self.logger.debug("Saving Operation(userId=" + str(user_id) +
                          ",projectId=" + str(project_id) + "," +
                          str(metadata) + ",algorithmId=" + str(algorithm_id) +
                          ", ops_group= " + str(op_group_id) + ")")

        # Operations of display-only categories are forced hidden.
        visible_operation = visible and category.display is False
        operation = dao.store_entity(operation)
        # NOTE(review): visibility is assigned AFTER store_entity, unlike sibling
        # snippets that set it before storing — verify the ORM session flushes
        # this change, otherwise it is never persisted.
        operation.visible = visible_operation

        return operation
Example n. 16
0
    def prepare_operation(self, user_id, project, algorithm, visible=True, view_model=None, ranges=None,
                          burst_gid=None, op_group_id=None):
        """
        Prepare, persist and return a single Operation for the given algorithm.

        The view-model's generic attributes are refreshed from the algorithm
        category (and optional burst) before the Operation and its ViewModel
        are stored.
        """
        category = dao.get_category_by_id(algorithm.fk_category)
        attrs = self.prepare_metadata(category, current_ga=view_model.generic_attributes, burst=burst_gid)
        attrs.visible = visible
        view_model.generic_attributes = attrs

        # Lazy %-style args produce the same message as the old concatenation.
        self.logger.debug("Saving Operation(userId=%s,projectId=%s,algorithmId=%s)",
                          user_id, project.id, algorithm.id)

        new_op = Operation(view_model.gid.hex, user_id, project.id, algorithm.id,
                           user_group=attrs.operation_tag, op_group_id=op_group_id, range_values=ranges)
        new_op = dao.store_entity(new_op)
        self.store_view_model(new_op, project, view_model)
        return new_op
Example n. 17
0
    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.

        :param project_id: id of the project the datatype is removed from.
        :param gid: global id of the datatype to remove.
        :param skip_validation: forwarded to the type-specific remover.
        :raises StructureException: when the underlying file operations fail.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)

            op = dao.get_operation_by_id(datatype.fk_from_operation)
            adapter = ABCAdapter.build_adapter(op.algorithm)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    # There is no view_model so the view_model_gid is None

                    new_op = Operation(op.view_model_gid,
                                       dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project)
                    to_project_path = self.structure_helper.get_project_folder(to_project)

                    # Decrypt/sync the destination before moving files into it.
                    encryption_handler.set_project_active(to_project)
                    encryption_handler.sync_folders(to_project_path)
                    to_project_name = to_project.name

                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project_name, str(new_op.id), full_path)
                    # Move also the ViewModel H5
                    old_folder = self.structure_helper.get_project_folder(project, str(op.id))
                    view_model = adapter.load_view_model(op)
                    vm_full_path = h5.determine_filepath(op.view_model_gid, old_folder)
                    self.structure_helper.move_datatype(view_model, to_project_name, str(new_op.id), vm_full_path)

                    # Push moved files and re-lock the destination project.
                    encryption_handler.sync_folders(to_project_path)
                    encryption_handler.set_project_inactive(to_project)

                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                # No links: delete the datatype and its backing file for real.
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)
                encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder_from_h5(h5_path))

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")
Esempio n. 18
0
    def build(subject="Datatype Factory User", state="RAW_DATA", project=None):
        """
        Create and persist a DataTypeGroup together with a parallel group of
        datatype measures, spanning a 2D range of parameters.

        :param subject: value stored on the DataTypeGroup subject column
        :param state: value stored on the DataTypeGroup state column
        :param project: optional Project; when None a new user + project are created
        :return: the persisted main (time series) DataTypeGroup entity
        """
        # TODO This is not real, we miss a ViewModel stored
        # Name and the (lo, step, hi) definition of each range parameter
        range_1 = ["row1", [1, 2, 10]]
        range_2 = ["row2", [0.1, 0.3, 0.5]]
        # The actual values taken inside each interval
        range_values_1 = [1, 3, 5, 7, 9]
        range_values_2 = [0.1, 0.4]

        user = user_factory()
        if project is None:
            project = project_factory(user)

        algorithm = dao.get_algorithm_by_module(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)

        # Root operation referenced by both datatype groups
        operation = operation_factory(algorithm=algorithm,
                                      test_user=user,
                                      test_project=project)

        group = OperationGroup(
            project.id, ranges=[json.dumps(range_1),
                                json.dumps(range_2)])
        group = dao.store_entity(group)
        group_ms = OperationGroup(
            project.id, ranges=[json.dumps(range_1),
                                json.dumps(range_2)])
        group_ms = dao.store_entity(group_ms)

        # 5 x 2 range combinations -> 10 expected results per group
        datatype_group = DataTypeGroup(group,
                                       subject=subject,
                                       state=state,
                                       operation_id=operation.id)
        datatype_group.no_of_ranges = 2
        datatype_group.count_results = 10
        datatype_group = dao.store_entity(datatype_group)

        dt_group_ms = DataTypeGroup(group_ms,
                                    subject=subject,
                                    state=state,
                                    operation_id=operation.id)
        # BUGFIX: these two attributes were previously (re)assigned on
        # datatype_group by copy-paste mistake, leaving the measures group
        # without its range metadata. They belong to dt_group_ms.
        dt_group_ms.no_of_ranges = 2
        dt_group_ms.count_results = 10
        # Reassign so dt_group_ms carries its persisted id when passed to
        # datatype_measure_factory below (consistent with datatype_group).
        dt_group_ms = dao.store_entity(dt_group_ms)

        # Create one time series (+ one measure) per combination of range values
        for range_val1 in range_values_1:
            for range_val2 in range_values_2:
                op = Operation(user.id,
                               project.id,
                               algorithm.id,
                               'test parameters',
                               status=STATUS_FINISHED,
                               range_values=json.dumps({
                                   range_1[0]: range_val1,
                                   range_2[0]: range_val2
                               }))
                op.fk_operation_group = group.id
                op = dao.store_entity(op)
                datatype = time_series_index_factory(op=op)
                datatype.number1 = range_val1
                datatype.number2 = range_val2
                datatype.fk_datatype_group = datatype_group.id
                datatype.operation_id = op.id
                dao.store_entity(datatype)

                op_ms = Operation(user.id,
                                  project.id,
                                  algorithm.id,
                                  'test parameters',
                                  status=STATUS_FINISHED,
                                  range_values=json.dumps({
                                      range_1[0]: range_val1,
                                      range_2[0]: range_val2
                                  }))
                op_ms.fk_operation_group = group_ms.id
                op_ms = dao.store_entity(op_ms)
                datatype_measure_factory(datatype, op_ms, dt_group_ms)

        return datatype_group
Esempio n. 19
0
    def build(project=None, store_vm=False, use_time_series_region=False, status=STATUS_FINISHED):
        """
        Create and persist a DataTypeGroup of TimeSeries indexes plus a parallel
        DataTypeGroup of datatype measures, covering a 2D parameter range.

        :param project: optional Project; when None a new user + project are created
        :param store_vm: when True, ViewModel H5 files are written in each operation folder
        :param use_time_series_region: when True, region time series (with connectivity,
            surface and region mapping) are generated instead of plain time series
        :param status: status set on the time series operations (measures always finish)
        :return: tuple (time series DataTypeGroup, measures DataTypeGroup)
        """
        # here we store the name and the (lo, step, hi) values of the range parameters
        range_1 = ["row1", [1, 2, 6]]
        range_2 = ["row2", [0.1, 0.3, 0.5]]
        # these are the actual numbers taken inside each interval
        range_values_1 = [1, 3, 5]
        range_values_2 = [0.1, 0.4]

        user = user_factory()
        if project is None:
            project = project_factory(user)

        connectivity = connectivity_factory(4)
        if use_time_series_region:
            # Region time series need connectivity, surface and region mapping
            # datatypes, each persisted under its own operation.
            operation = operation_factory(test_project=project)
            connectivity_index_factory(op=operation, conn=connectivity)

            operation2 = operation_factory(test_project=project)
            surface = surface_factory()
            surface_index_factory(op=operation2, surface=surface)

            operation3 = operation_factory(test_project=project)
            region_mapping = region_mapping_factory(surface=surface, connectivity=connectivity)
            region_mapping_index_factory(op=operation3, conn_gid=connectivity.gid.hex, surface_gid=surface.gid.hex, region_mapping=region_mapping)

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if store_vm:
            view_model = adapter.get_view_model_class()()
            view_model.connectivity = connectivity.gid
        else:
            view_model = None

        # The metric ViewModel always exists; it is used as a deepcopy template below
        algorithm_ms = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm_ms)
        view_model_ms = adapter.get_view_model_class()()

        op_group = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
        op_group = dao.store_entity(op_group)
        op_group_ms = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
        op_group_ms = dao.store_entity(op_group_ms)

        # 3 x 2 range combinations -> 6 expected results in each group
        datatype_group = DataTypeGroup(op_group, state="RAW_DATA")
        datatype_group.no_of_ranges = 2
        datatype_group.count_results = 6
        datatype_group = dao.store_entity(datatype_group)

        dt_group_ms = DataTypeGroup(op_group_ms, state="RAW_DATA")
        dt_group_ms.no_of_ranges = 2
        dt_group_ms.count_results = 6
        dao.store_entity(dt_group_ms)

        # Now create some data types and add them to group
        for range_val1 in range_values_1:
            for range_val2 in range_values_2:

                # Each operation references its ViewModel by gid, even when no H5 is stored
                view_model_gid = uuid.uuid4()
                view_model_ms_gid = uuid.uuid4()

                op = Operation(view_model_gid.hex, user.id, project.id, algorithm.id,
                               status=status, op_group_id=op_group.id,
                               range_values=json.dumps({range_1[0]: range_val1,
                                                        range_2[0]: range_val2}))
                op = dao.store_entity(op)
                if use_time_series_region:
                    ts = time_series_region_factory(connectivity=connectivity, region_mapping=region_mapping)
                    ts_index = time_series_region_index_factory(ts=ts, connectivity=connectivity,
                                                                region_mapping=region_mapping, test_user=user,
                                                                test_project=project, op=op)
                else:
                    ts = time_series_factory()
                    ts_index = time_series_index_factory(ts=ts, op=op)
                ts_index.fk_datatype_group = datatype_group.id
                dao.store_entity(ts_index)

                op_ms = Operation(view_model_ms_gid.hex, user.id, project.id, algorithm.id,
                                  status=STATUS_FINISHED, op_group_id=op_group_ms.id,
                                  range_values=json.dumps({range_1[0]: range_val1,
                                                           range_2[0]: range_val2}))
                op_ms = dao.store_entity(op_ms)
                datatype_measure_factory(ts_index, ts, op_ms, dt_group_ms)

                if store_vm:
                    # Deepcopy so every operation folder gets its own ViewModel
                    # instance with a unique gid
                    view_model = copy.deepcopy(view_model)
                    view_model.gid = view_model_gid
                    op_path = StorageInterface().get_project_folder(project.name, str(op.id))
                    h5.store_view_model(view_model, op_path)

                    view_model_ms = copy.deepcopy(view_model_ms)
                    view_model_ms.gid = view_model_ms_gid
                    view_model_ms.time_series = ts_index.gid
                    op_ms_path = StorageInterface().get_project_folder(project.name, str(op_ms.id))
                    h5.store_view_model(view_model_ms, op_ms_path)

                if not datatype_group.fk_from_operation:
                    # Mark first operation ID on both groups (only on the first iteration)
                    datatype_group.fk_from_operation = op.id
                    dt_group_ms.fk_from_operation = op_ms.id
                    datatype_group = dao.store_entity(datatype_group)
                    dt_group_ms = dao.store_entity(dt_group_ms)

        return datatype_group, dt_group_ms
Esempio n. 20
0
    def _retrieve_operations_in_order(self, project, import_path):
        # type: (Project, str) -> list[Operation2ImportData]
        """
        Walk *import_path* and build one Operation2ImportData per operation found,
        supporting both the legacy XML layout and the current ViewModel H5 layout.
        The result is sorted by each entry's order_field.
        """
        collected = []

        for folder, _, file_names in os.walk(import_path):
            if OPERATION_XML in file_names:
                # Legacy layout: an Operation.xml file fully describes the operation
                xml_path = os.path.join(folder, OPERATION_XML)
                operation, operation_xml_parameters = self.__build_operation_from_file(
                    project, xml_path)
                operation.import_file = xml_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                collected.append(
                    Operation2ImportData(
                        operation,
                        folder,
                        info_from_xml=operation_xml_parameters))
                continue

            # Current layout: look for ViewModel H5 files describing the operation
            main_view_model = None
            dt_paths = []
            all_view_model_files = []
            h5_candidates = [os.path.join(folder, name) for name in file_names
                             if name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION)]
            for h5_file in h5_candidates:
                try:
                    if H5File.h5_class_from_file(h5_file) is ViewModelH5:
                        all_view_model_files.append(h5_file)
                        if main_view_model is None:
                            candidate = h5.load_view_model_from_file(h5_file)
                            # Only ViewModels with a registered adapter can drive an operation
                            if type(candidate) in VIEW_MODEL2ADAPTER.keys():
                                main_view_model = candidate
                    else:
                        # Plain datatype H5: upgrade in place, then remember its path
                        FilesUpdateManager().upgrade_file(h5_file)
                        dt_paths.append(h5_file)
                except Exception:
                    self.logger.warning(
                        "Unreadable H5 file will be ignored: %s" % h5_file)

            if main_view_model is not None:
                alg = VIEW_MODEL2ADAPTER[type(main_view_model)]
                operation = Operation(main_view_model.gid.hex,
                                      project.fk_admin,
                                      project.id,
                                      alg.id,
                                      status=STATUS_FINISHED,
                                      user_group=main_view_model.generic_attributes.operation_tag,
                                      start_date=datetime.now(),
                                      completion_date=datetime.now())
                operation.create_date = main_view_model.create_date
                self.logger.debug(
                    "Found main ViewModel to create operation for it: " +
                    str(operation))
                collected.append(
                    Operation2ImportData(operation, folder, main_view_model,
                                         dt_paths, all_view_model_files))
            elif dt_paths:
                # No usable ViewModel: fall back to a generic TVB importer operation
                # fed with the first datatype file found
                alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                  TVB_IMPORTER_CLASS)
                default_adapter = ABCAdapter.build_adapter(alg)
                view_model = default_adapter.get_view_model_class()()
                view_model.data_file = dt_paths[0]
                vm_path = h5.store_view_model(view_model, folder)
                all_view_model_files.append(vm_path)
                operation = Operation(view_model.gid.hex,
                                      project.fk_admin,
                                      project.id,
                                      alg.id,
                                      status=STATUS_FINISHED,
                                      start_date=datetime.now(),
                                      completion_date=datetime.now())
                self.logger.debug(
                    "Found no ViewModel in folder, so we default to " +
                    str(operation))
                collected.append(
                    Operation2ImportData(operation, folder, view_model,
                                         dt_paths, all_view_model_files,
                                         True))

        return sorted(collected, key=lambda op_data: op_data.order_field)
Esempio n. 21
0
    def build(subject="Datatype Factory User", state="RAW_DATA", project=None):
        """
        Create and persist a DataTypeGroup spanning a 2D parameter range,
        together with a parallel OperationGroup for datatype measures.

        :param subject: value stored on the DataTypeGroup subject column
        :param state: value stored on the DataTypeGroup state column
        :param project: optional Project; when None a new user + project are created
        :return: the persisted main DataTypeGroup entity
        """
        first_range = ["row1", [1, 2, 3]]
        second_range = ["row2", [0.1, 0.3, 0.5]]

        user = user_factory()

        if project is None:
            project = project_factory(user)

        # Make sure a simulator Algorithm exists, creating category + algorithm on demand
        category = AlgorithmCategory('one', True)
        dao.store_entity(category)
        fallback_algorithm = Algorithm(IntrospectionRegistry.SIMULATOR_MODULE,
                                       IntrospectionRegistry.SIMULATOR_CLASS, category.id)
        algorithm = dao.get_algorithm_by_module(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)

        if algorithm is None:
            algorithm = dao.store_entity(fallback_algorithm)

        # Metadata shared by every operation created below
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "Datatype Factory User",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }

        # Root operation referenced by both datatype groups
        operation = operation_factory(algorithm=algorithm,
                                      test_user=user,
                                      test_project=project,
                                      meta=meta)

        def _serialized_ranges():
            # Fresh list per OperationGroup, mirroring the original construction
            return [json.dumps(first_range), json.dumps(second_range)]

        group = dao.store_entity(OperationGroup(project.id, ranges=_serialized_ranges()))
        group_ms = dao.store_entity(OperationGroup(project.id, ranges=_serialized_ranges()))

        datatype_group = dao.store_entity(DataTypeGroup(group,
                                                        subject=subject,
                                                        state=state,
                                                        operation_id=operation.id))

        dt_group_ms = DataTypeGroup(group_ms,
                                    subject=subject,
                                    state=state,
                                    operation_id=operation.id)
        dao.store_entity(dt_group_ms)

        # One datatype (+ one measure operation) per combination of range values
        for val_1 in first_range[1]:
            for val_2 in second_range[1]:
                range_json = json.dumps({first_range[0]: val_1,
                                         second_range[0]: val_2})

                op = Operation(user.id,
                               project.id,
                               algorithm.id,
                               'test parameters',
                               meta=json.dumps(meta),
                               status=STATUS_FINISHED,
                               range_values=range_json)
                op.fk_operation_group = group.id
                op = dao.store_entity(op)

                datatype = time_series_index_factory(op=op)
                datatype.number1 = val_1
                datatype.number2 = val_2
                datatype.fk_datatype_group = datatype_group.id
                datatype.operation_id = op.id
                dao.store_entity(datatype)

                op_ms = Operation(user.id,
                                  project.id,
                                  algorithm.id,
                                  'test parameters',
                                  meta=json.dumps(meta),
                                  status=STATUS_FINISHED,
                                  range_values=range_json)
                op_ms.fk_operation_group = group_ms.id
                op_ms = dao.store_entity(op_ms)
                datatype_measure_factory(datatype)

        return datatype_group
Esempio n. 22
0
    def prepare_operations(self,
                           user_id,
                           project_id,
                           algorithm,
                           category,
                           metadata,
                           visible=True,
                           existing_dt_group=None,
                           **kwargs):
        """
        Do all the necessary preparations for storing an operation. If it's the case of a
        range of values create an operation group and multiple operations for each possible
        instance from the range.
        :param metadata: Initial MetaData with potential Burst identification inside.
        """
        available_args, group = self._prepare_group(project_id,
                                                    existing_dt_group, kwargs)
        # Guard against ranges that would explode into too many operations
        if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
            raise LaunchException(
                "Too big range specified. You should limit the"
                " resulting operations to %d" %
                TvbProfile.current.MAX_RANGE_NUMBER)
        self.logger.debug("Launching a range with %d operations..." %
                          len(available_args))

        group_id = group.id if group is not None else None
        metadata, user_group = self._prepare_metadata(metadata, category,
                                                      group, kwargs)

        self.logger.debug(
            "Saving Operation(userId=%s,projectId=%s,%s,algorithmId=%s, ops_group= %s)"
            % (user_id, project_id, metadata, algorithm.id, group_id))

        # Operations of display-only algorithms are never shown in the UI
        visible_operation = visible and category.display is False
        meta_str = json.dumps(metadata)

        operations = []
        for one_set_of_args, range_vals in available_args:
            operation = Operation(user_id,
                                  project_id,
                                  algorithm.id,
                                  json.dumps(one_set_of_args),
                                  meta_str,
                                  op_group_id=group_id,
                                  user_group=user_group,
                                  range_values=json.dumps(range_vals) if range_vals else None)
            operation.visible = visible_operation
            operations.append(operation)
        operations = dao.store_entities(operations)

        if group is not None:
            burst_id = (metadata[DataTypeMetaData.KEY_BURST]
                        if DataTypeMetaData.KEY_BURST in metadata else None)
            if existing_dt_group is None:
                # A fresh range gets its own DataTypeGroup, anchored on the first operation
                dao.store_entity(DataTypeGroup(
                    group,
                    operation_id=operations[0].id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE]))
            else:
                # Reset the cached result count so it will be recomputed
                existing_dt_group.count_results = None
                dao.store_entity(existing_dt_group)

        return operations, group
Esempio n. 23
0
 def launch_sim(self, user_id, project, algorithm, zip_folder_path,
                simulator_file):
     """
     Stub implementation: ignores all arguments and returns a dummy Operation
     built from placeholder values (not persisted here).
     """
     return Operation('', '', '', {})
Esempio n. 24
0
    def prepare_operations_for_workflowsteps(self, workflow_step_list,
                                             workflows, user_id, burst_id,
                                             project_id, group,
                                             sim_operations):
        """
        Create and store Operation entities from a list of Workflow Steps.
        Will be generated workflows x workflow_step_list Operations.
        For every step in workflow_step_list one OperationGroup and one DataTypeGroup will be created
        (in case of PSE).

        :param workflow_step_list: steps to clone into every workflow
        :param workflows: workflows each step clone gets attached to
        :param user_id: owner of the created operations
        :param burst_id: burst identifier stored in each operation's metadata
        :param project_id: project the operations belong to
        :param group: simulations' OperationGroup (None outside PSE)
        :param sim_operations: simulation operations, aligned by index with
            workflows; their range_values are copied onto the new operations
        """

        for step in workflow_step_list:
            operation_group = None
            # Only non-visualization steps in a PSE get their own OperationGroup
            if (group is not None) and not isinstance(step, WorkflowStepView):
                operation_group = OperationGroup(project_id=project_id,
                                                 ranges=group.range_references)
                operation_group = dao.store_entity(operation_group)

            operation = None
            metadata = {DataTypeMetaData.KEY_BURST: burst_id}
            # get_algorithm_by_id returns the Algorithm; only its category is needed here
            algo_category = dao.get_algorithm_by_id(step.fk_algorithm)
            if algo_category is not None:
                algo_category = algo_category.algorithm_category

            for wf_idx, workflow in enumerate(workflows):
                cloned_w_step = step.clone()
                cloned_w_step.fk_workflow = workflow.id
                dynamic_params = cloned_w_step.dynamic_param
                op_params = cloned_w_step.static_param
                # dynamic params take precedence over static ones
                op_params.update(dynamic_params)
                range_values = None
                group_id = None
                if operation_group is not None:
                    group_id = operation_group.id
                    # reuse the range values of the matching simulation operation
                    range_values = sim_operations[wf_idx].range_values

                if not isinstance(step, WorkflowStepView):
                    ## For visualization steps, do not create operations, as those are not really needed.
                    metadata, user_group = self._prepare_metadata(
                        metadata, algo_category, operation_group, op_params)
                    operation = Operation(user_id,
                                          project_id,
                                          step.fk_algorithm,
                                          json.dumps(
                                              op_params,
                                              cls=MapAsJson.MapAsJsonEncoder),
                                          meta=json.dumps(metadata),
                                          op_group_id=group_id,
                                          range_values=range_values,
                                          user_group=user_group)
                    operation.visible = step.step_visible
                    operation = dao.store_entity(operation)
                    cloned_w_step.fk_operation = operation.id

                dao.store_entity(cloned_w_step)

            # In PSE, anchor a DataTypeGroup on the last operation stored for this step
            if operation_group is not None and operation is not None:
                datatype_group = DataTypeGroup(
                    operation_group,
                    operation_id=operation.id,
                    fk_parent_burst=burst_id,
                    state=metadata[DataTypeMetaData.KEY_STATE])
                dao.store_entity(datatype_group)