Example 1
    def build():

        time_series_index = time_series_index_factory()
        time_series = h5.load_from_index(time_series_index)
        data = numpy.random.random((10, 10))
        covariance = graph.Covariance(source=time_series, array_data=data)

        op = operation_factory()

        covariance_index = CovarianceIndex()
        covariance_index.fk_from_operation = op.id
        covariance_index.fill_from_has_traits(covariance)

        covariance_h5_path = h5.path_for_stored_index(covariance_index)
        with CovarianceH5(covariance_h5_path) as f:
            f.store(covariance)

        covariance_index = dao.store_entity(covariance_index)
        return covariance_index
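A minimal usage sketch, not part of the original examples: how such a factory might be consumed in a test. The fixture name covariance_index_factory and the test itself are assumptions.

    def test_covariance_round_trip(covariance_index_factory):
        covariance_index = covariance_index_factory()
        # The H5 file written by the factory can be located again from the stored index
        stored_path = h5.path_for_stored_index(covariance_index)
        assert os.path.exists(stored_path)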
Example 2
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of a datatype from a storage / H5 file
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)
        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            # burst.fk_operation_group = TODO
            # burst.fk_metric_operation_group = TODO
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # Add all the required attributes
            if datatype_group is not None:
                datatype_index.fk_datatype_group = datatype_group.id
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
Example 3
    def __copy_linked_datatype_before_delete(self, op, datatype, project, fk_to_project):
        new_op = Operation(op.view_model_gid,
                           dao.get_system_user().id,
                           fk_to_project,
                           datatype.parent_operation.fk_from_algo,
                           datatype.parent_operation.status,
                           datatype.parent_operation.start_date,
                           datatype.parent_operation.completion_date,
                           datatype.parent_operation.fk_operation_group,
                           datatype.parent_operation.additional_info,
                           datatype.parent_operation.user_group,
                           datatype.parent_operation.range_values)
        new_op.visible = datatype.parent_operation.visible
        new_op = dao.store_entity(new_op)
        to_project = self.find_project(fk_to_project)
        to_project_path = self.storage_interface.get_project_folder(to_project.name)

        full_path = h5.path_for_stored_index(datatype)
        old_folder = self.storage_interface.get_project_folder(project.name, str(op.id))
        file_paths = h5.gather_references_of_view_model(op.view_model_gid, old_folder, only_view_models=True)[0]
        file_paths.append(full_path)

        # The BurstConfiguration H5 file has to be moved only when we handle the time series whose operation
        # folder contains the file
        if datatype.is_ts and datatype.fk_parent_burst is not None:
            bc_path = h5.path_for(datatype.parent_operation.id, BurstConfigurationH5, datatype.fk_parent_burst,
                                  project.name)
            if os.path.exists(bc_path):
                file_paths.append(bc_path)

                bc = dao.get_burst_for_operation_id(op.id)
                bc.fk_simulation = new_op.id
                dao.store_entity(bc)

        # Move all files to the new operation folder
        self.storage_interface.move_datatype_with_sync(to_project, to_project_path, new_op.id, file_paths)

        datatype.fk_from_operation = new_op.id
        datatype.parent_operation = new_op
        dao.store_entity(datatype)

        return new_op
Example 4
    def store_datatype(self, datatype, current_file=None):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" % (datatype.__class__.__name__, datatype.gid))
            # Now move storage file into correct folder if necessary
            if current_file is not None:
                final_path = h5.path_for_stored_index(datatype)
                if final_path != current_file:
                    shutil.move(current_file, final_path)

            return dao.store_entity(datatype)
        except MissingDataSetException as e:
            self.logger.exception(e)
            error_msg = "Datatype %s has missing data and could not be imported properly." % (datatype,)
            raise ImportException(error_msg)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            raise ImportException(error_msg)
Example 5
    def build(data=None, op=None):
        ts = time_series_factory(data)

        if op is None:
            op = operation_factory()

        ts_db = TimeSeriesIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)
            f.store_generic_attributes(GenericAttributes())
            f.store_references(ts)

        ts_db = dao.store_entity(ts_db)
        return ts_db
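A hedged sketch of calling this factory with explicit data, assuming build(data=None, op=None) is exposed as the time_series_index_factory fixture used in the other examples; the 4D data shape is also an assumption.

    # Hypothetical usage: build a TimeSeriesIndex from custom data, letting the
    # factory create a fresh operation because op is left as None.
    data = numpy.random.random((100, 1, 10, 1))
    ts_index = time_series_index_factory(data=data)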
Example 6
    def build():
        time_series_index = time_series_index_factory()
        time_series = h5.load_from_index(time_series_index)
        cross_coherence = spectral.CoherenceSpectrum(source=time_series,
                                                     nfft=4,
                                                     array_data=numpy.random.random((10, 10)),
                                                     frequency=numpy.random.random((10,)))

        op = operation_factory()

        cross_coherence_index = CoherenceSpectrumIndex()
        cross_coherence_index.fk_from_operation = op.id
        cross_coherence_index.fill_from_has_traits(cross_coherence)

        cross_coherence_h5_path = h5.path_for_stored_index(cross_coherence_index)
        with CoherenceSpectrumH5(cross_coherence_h5_path) as f:
            f.store(cross_coherence)

        cross_coherence_index = dao.store_entity(cross_coherence_index)
        return cross_coherence_index
Example 7
    def build():
        time_series_index = time_series_index_factory()
        time_series = h5.load_from_index(time_series_index)
        data = numpy.random.random((10, 10, 10, 10, 10))
        cross_correlation = temporal_correlations.CrossCorrelation(
            source=time_series, array_data=data)

        op = operation_factory()

        cross_correlation_index = CrossCorrelationIndex()
        cross_correlation_index.fk_from_operation = op.id
        cross_correlation_index.fill_from_has_traits(cross_correlation)

        cross_correlation_h5_path = h5.path_for_stored_index(
            cross_correlation_index)
        with CrossCorrelationH5(cross_correlation_h5_path) as f:
            f.store(cross_correlation)

        cross_correlation_index = dao.store_entity(cross_correlation_index)
        return cross_correlation_index
Example 8
    def prepare_datatypes_for_export(data):
        """
        Method used for exporting data type groups. It returns a list of all datatype indexes that need to be
        exported and a dictionary whose keys are operation folder names and whose values are lists of the paths
        that belong to one particular operation folder.
        """
        all_datatypes = ProjectService.get_all_datatypes_from_data(data)
        first_datatype = all_datatypes[0]

        # We are exporting a group of datatype measures so we need to find the group of time series
        if hasattr(first_datatype, 'fk_source_gid'):
            ts = h5.load_entity_by_gid(first_datatype.fk_source_gid)
            dt_metric_group = dao.get_datatypegroup_by_op_group_id(
                ts.parent_operation.fk_operation_group)
            datatype_measure_list = ProjectService.get_all_datatypes_from_data(
                dt_metric_group)
            all_datatypes = datatype_measure_list + all_datatypes
        else:
            ts_group = dao.get_datatype_measure_group_from_ts_from_pse(
                first_datatype.gid, DatatypeMeasureIndex)
            time_series_list = ProjectService.get_all_datatypes_from_data(
                ts_group)
            all_datatypes = all_datatypes + time_series_list

        if all_datatypes is None or len(all_datatypes) == 0:
            raise ExportException(
                "Could not export a data type group with no data!")

        op_file_dict = dict()
        for dt in all_datatypes:
            h5_path = h5.path_for_stored_index(dt)
            op_folder = os.path.dirname(h5_path)
            op_file_dict[op_folder] = [h5_path]

            op = dao.get_operation_by_id(dt.fk_from_operation)
            vms = h5.gather_references_of_view_model(op.view_model_gid,
                                                     os.path.dirname(h5_path),
                                                     only_view_models=True)
            op_file_dict[op_folder].extend(vms[0])

        return all_datatypes, op_file_dict
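The return shape described in the docstring can be consumed as in this hedged sketch; datatype_group here is an assumed DataTypeGroup argument (Example 11 below calls the method on the exporter instance as self.prepare_datatypes_for_export(data)).

    all_datatypes, op_file_dict = prepare_datatypes_for_export(datatype_group)
    for op_folder, h5_paths in op_file_dict.items():
        # op_folder is an operation folder name; h5_paths lists the H5 and ViewModel files in it
        print(op_folder, len(h5_paths), "file(s) to export")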
Example 9
    def build():
        time = numpy.linspace(0, 1000, 4000)
        data = numpy.zeros((time.size, 1, 3, 1))
        data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
        data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
        data[:, 0, 2, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 100) + numpy.sin(2 * numpy.pi * time / 1000.0 * 300)

        ts = TimeSeries(time=time, data=data, sample_period=1.0 / 4000)
        op = operation_factory()

        ts_db = TimeSeriesIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesH5(ts_h5_path) as f:
            f.store(ts)

        session.add(ts_db)
        session.commit()
        return ts_db
Example 10
    def load_datatype_from_file(self,
                                storage_folder,
                                file_name,
                                op_id,
                                datatype_group=None,
                                move=True,
                                final_storage=None):
        """
        Creates an instance of a datatype from a storage / H5 file
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % file_name)
        datatype, generic_attributes = h5.load_with_references(
            os.path.join(storage_folder, file_name))
        index_class = h5.REGISTRY.get_index_for_datatype(datatype.__class__)
        datatype_index = index_class()
        datatype_index.fill_from_has_traits(datatype)
        datatype_index.fill_from_generic_attributes(generic_attributes)

        # Add all the required attributes
        if datatype_group is not None:
            datatype_index.fk_datatype_group = datatype_group.id
        datatype_index.fk_from_operation = op_id

        associated_file = h5.path_for_stored_index(datatype_index)
        if os.path.exists(associated_file):
            datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                associated_file)

        # Now move storage file into correct folder if necessary
        if move and final_storage is not None:
            current_file = os.path.join(storage_folder, file_name)
            h5_type = h5.REGISTRY.get_h5file_for_datatype(datatype.__class__)
            final_path = h5.path_for(final_storage, h5_type, datatype.gid)
            if final_path != current_file and move:
                shutil.move(current_file, final_path)

        return datatype_index
Example 11
    def export(self, data, project, public_key_path, password):
        """
        Exports data type:
        1. If data is a normal data type, simply exports the storage file (HDF format)
        2. If data is a DataTypeGroup, creates a zip with all files for all data types
        """
        download_file_name = self._get_export_file_name(data)

        if DataTypeGroup.is_data_a_group(data):
            _, op_file_dict = self.prepare_datatypes_for_export(data)

            # Create ZIP archive
            zip_file = self.storage_interface.export_datatypes_structure(
                op_file_dict, data, download_file_name, public_key_path,
                password)
            return download_file_name, zip_file, True
        else:
            data_path = h5.path_for_stored_index(data)
            data_file = self.storage_interface.export_datatypes(
                [data_path], data, download_file_name, public_key_path,
                password)

            return None, data_file, True
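A hedged sketch of invoking this exporter method; the exporter instance, the datatype_index argument, and passing None to skip encryption are assumptions, not taken from the source.

    # Hypothetical call; for a non-group datatype the method returns (None, data_file, True)
    _, data_file, _ = exporter.export(datatype_index, project, public_key_path=None, password=None)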
Example 12
    def build(row1=None,
              row2=None,
              project=None,
              operation=None,
              subject=None,
              state=None):
        data_type = dummy_datatype_factory()
        data_type.row1 = row1
        data_type.row2 = row2

        if operation is None:
            operation = operation_factory(test_project=project)

        data_type_index = DummyDataTypeIndex(subject=subject, state=state)
        data_type_index.fk_from_operation = operation.id
        data_type_index.fill_from_has_traits(data_type)

        data_type_h5_path = h5.path_for_stored_index(data_type_index)
        with DummyDataTypeH5(data_type_h5_path) as f:
            f.store(data_type)

        data_type_index = dao.store_entity(data_type_index)
        return data_type_index
Example 13
def search_and_export_ts(project_id, export_folder=os.path.join("~", "TVB")):
    # This is the simplest filter you could write: filter an entity by Subject
    filter_connectivity = FilterChain(
        fields=[FilterChain.datatype + '.subject'],
        operations=["=="],
        values=[DataTypeMetaData.DEFAULT_SUBJECT])

    connectivities = _retrieve_entities_by_filters(ConnectivityIndex,
                                                   project_id,
                                                   filter_connectivity)

    # A more complex filter: by linked entity (connectivity), sampling, operation date:
    filter_timeseries = FilterChain(
        fields=[
            FilterChain.datatype + '.fk_connectivity_gid',
            FilterChain.datatype + '.sample_period',
            FilterChain.operation + '.create_date'
        ],
        operations=["==", ">=", "<="],
        values=[connectivities[0].gid, 0,
                datetime.now()])

    # If you want to filter another type of TS, change the class below:
    # instead of TimeSeriesRegion use TimeSeriesEEG, TimeSeriesSurface, etc.
    timeseries = _retrieve_entities_by_filters(TimeSeriesRegionIndex,
                                               project_id, filter_timeseries)

    for ts in timeseries:
        print("=============================")
        print(ts.summary_info)
        storage_h5 = h5.path_for_stored_index(ts)
        print(" Original file: " + str(storage_h5))
        destination_folder = os.path.expanduser(export_folder)
        shutil.copy2(storage_h5, destination_folder)
        print("File {0} exported in {1}".format(storage_h5,
                                                destination_folder))
Example 14
    def load_with_references(dt_gid):
        # type: (uuid.UUID) -> HasTraits
        dt_index = load_entity_by_gid(dt_gid)
        h5_path = h5.path_for_stored_index(dt_index)
        dt, _ = h5.load_with_references(h5_path)
        return dt
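A hedged usage sketch for this helper; connectivity_index is an assumed, already stored index whose gid is resolved back into a full HasTraits instance.

    # Hypothetical usage: any stored index gid would work here
    connectivity = load_with_references(connectivity_index.gid)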
Example 15
    def load_with_references(self, dt_gid):
        # type: (typing.Union[uuid.UUID, str]) -> HasTraits
        dt_index = self.load_entity_by_gid(dt_gid)
        h5_path = h5.path_for_stored_index(dt_index)
        dt, _ = h5.load_with_references(h5_path)
        return dt
Example 16
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of a datatype from a storage / H5 file
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)

            already_existing_datatype = h5.load_entity_by_gid(datatype.gid)
            if datatype_group is not None and already_existing_datatype is not None:
                raise DatatypeGroupImportException(
                    "The datatype group that you are trying to import"
                    " already exists!")
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \
                    datatype_index.fk_source_gid is not None:
                ts = h5.load_entity_by_gid(datatype_index.fk_source_gid)

                if ts is None:
                    op = dao.get_operations_in_group(
                        datatype_group.fk_operation_group,
                        only_first_operation=True)
                    op.fk_operation_group = None
                    dao.store_entity(op)
                    dao.remove_entity(OperationGroup,
                                      datatype_group.fk_operation_group)
                    dao.remove_entity(DataTypeGroup, datatype_group.id)
                    raise DatatypeGroupImportException(
                        "Please import the time series group before importing the"
                        " datatype measure group!")

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = StorageInterface.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
Example 17
    def _load_h5_of_gid(self, entity_gid):
        entity_index = self.load_entity_by_gid(entity_gid)
        entity_h5_class = h5.REGISTRY.get_h5file_for_index(type(entity_index))
        entity_h5_path = h5.path_for_stored_index(entity_index)
        return entity_h5_class, entity_h5_path
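A hedged sketch of using the returned pair, following the with SomeH5(path) pattern from the other examples; the load_generic_attributes call mirrors its use in Example 21 and is otherwise an assumption here.

    entity_h5_class, entity_h5_path = self._load_h5_of_gid(entity_gid)
    with entity_h5_class(entity_h5_path) as entity_h5:
        generic_attrs = entity_h5.load_generic_attributes()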
Example 18
    def launch(self, simulator_gid):
        """
        Called from the GUI to launch a simulation.
          *: string class name of chosen model, etc...
          *_parameters: dictionary of parameters for chosen model, etc...
          connectivity: tvb.datatypes.connectivity.Connectivity object.
          surface: tvb.datatypes.surfaces.CorticalSurface or None.
          stimulus: tvb.datatypes.patterns.* object
        """
        result_h5 = dict()
        result_indexes = dict()
        start_time = self.algorithm.current_step * self.algorithm.integrator.dt

        self.algorithm.configure(full_configure=False)
        if self.branch_simulation_state_gid is not None:
            simulation_state_index = dao.get_datatype_by_gid(
                self.branch_simulation_state_gid.hex)
            self.branch_simulation_state_path = h5.path_for_stored_index(
                simulation_state_index)

            with SimulationStateH5(self.branch_simulation_state_path
                                   ) as branch_simulation_state_h5:
                branch_simulation_state_h5.load_into(self.algorithm)

        region_map, region_volume_map = self._try_load_region_mapping()

        for monitor in self.algorithm.monitors:
            m_name = monitor.__class__.__name__
            ts = monitor.create_time_series(self.algorithm.connectivity,
                                            self.algorithm.surface, region_map,
                                            region_volume_map)
            self.log.debug("Monitor created the TS")
            ts.start_time = start_time

            ts_index_class = h5.REGISTRY.get_index_for_datatype(type(ts))
            ts_index = ts_index_class()
            ts_index.fill_from_has_traits(ts)
            ts_index.data_ndim = 4
            ts_index.state = 'INTERMEDIATE'

            # state_variable_dimension_name = ts.labels_ordering[1]
            # if ts_index.user_tag_1:
            #     ts_index.labels_dimensions[state_variable_dimension_name] = ts.user_tag_1.split(';')
            # elif m_name in self.HAVE_STATE_VARIABLES:
            #     selected_vois = [self.algorithm.model.variables_of_interest[idx] for idx in monitor.voi]
            #     ts.labels_dimensions[state_variable_dimension_name] = selected_vois

            ts_h5_class = h5.REGISTRY.get_h5file_for_datatype(type(ts))
            ts_h5_path = h5.path_for(self.storage_path, ts_h5_class, ts.gid)
            ts_h5 = ts_h5_class(ts_h5_path)
            ts_h5.store(ts, scalars_only=True, store_references=False)
            ts_h5.sample_rate.store(ts.sample_rate)
            ts_h5.nr_dimensions.store(ts_index.data_ndim)

            if self.algorithm.surface:
                ts_index.surface_gid = self.algorithm.surface.region_mapping_data.surface.gid.hex
                ts_h5.surface.store(self.algorithm.surface.gid)
            else:
                ts_index.connectivity_gid = self.algorithm.connectivity.gid.hex
                ts_h5.connectivity.store(self.algorithm.connectivity.gid)
                if region_map:
                    ts_index.region_mapping_gid = region_map.gid.hex
                    ts_h5.region_mapping.store(region_map.gid)
                if region_volume_map:
                    ts_index.region_mapping_volume_gid = region_volume_map.gid.hex
                    ts_h5.region_mapping_volume.store(region_volume_map.gid)

            result_indexes[m_name] = ts_index
            result_h5[m_name] = ts_h5

        # Run simulation
        self.log.debug("Starting simulation...")
        for result in self.algorithm(simulation_length=self.simulation_length):
            for j, monitor in enumerate(self.algorithm.monitors):
                if result[j] is not None:
                    m_name = monitor.__class__.__name__
                    ts_h5 = result_h5[m_name]
                    ts_h5.write_time_slice([result[j][0]])
                    ts_h5.write_data_slice([result[j][1]])

        self.log.debug(
            "Completed simulation, starting to store simulation state ")
        # Populate H5 file for simulator state. This step could also be done while running sim, in background.
        if not self._is_group_launch():
            simulation_state_index = SimulationStateIndex()
            simulation_state_path = h5.path_for(self.storage_path,
                                                SimulationStateH5,
                                                self.algorithm.gid)
            with SimulationStateH5(
                    simulation_state_path) as simulation_state_h5:
                simulation_state_h5.store(self.algorithm)
            self._capture_operation_results([simulation_state_index])

        self.log.debug("Simulation state persisted, returning results ")
        for monitor in self.algorithm.monitors:
            m_name = monitor.__class__.__name__
            ts_shape = result_h5[m_name].read_data_shape()
            result_indexes[m_name].fill_shape(ts_shape)
            result_h5[m_name].close()
        # self.log.info("%s: Adapter simulation finished!!" % str(self))
        return list(result_indexes.values())
Example 19
    def load_with_references(self, dt_gid, dt_class=None):
        # type: (typing.Union[uuid.UUID, str], typing.Type[HasTraits]) -> HasTraits
        dt_index = ABCAdapter.load_entity_by_gid(dt_gid)
        h5_path = h5.path_for_stored_index(dt_index)
        dt, _ = h5.load_with_references(h5_path)
        return dt
Example 20
    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, this will THROW a StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)

            op = dao.get_operation_by_id(datatype.fk_from_operation)
            adapter = ABCAdapter.build_adapter(op.algorithm)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    # There is no view_model so the view_model_gid is None

                    new_op = Operation(op.view_model_gid,
                                       dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project)
                    to_project_path = self.structure_helper.get_project_folder(to_project)

                    encryption_handler.set_project_active(to_project)
                    encryption_handler.sync_folders(to_project_path)
                    to_project_name = to_project.name

                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project_name, str(new_op.id), full_path)
                    # Move also the ViewModel H5
                    old_folder = self.structure_helper.get_project_folder(project, str(op.id))
                    view_model = adapter.load_view_model(op)
                    vm_full_path = h5.determine_filepath(op.view_model_gid, old_folder)
                    self.structure_helper.move_datatype(view_model, to_project_name, str(new_op.id), vm_full_path)

                    encryption_handler.sync_folders(to_project_path)
                    encryption_handler.set_project_inactive(to_project)

                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)
                encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder_from_h5(h5_path))

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
Example 21
    def _edit_data(self, datatype, new_data, from_group=False):
        # type: (DataType, dict, bool) -> None
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)
            op_folder = self.structure_helper.get_project_folder(operation.project, str(operation.id))
            vm_gid = operation.view_model_gid
            view_model_file = h5.determine_filepath(vm_gid, op_folder)
            if view_model_file:
                view_model_class = H5File.determine_type(view_model_file)
                view_model = view_model_class()
                with ViewModelH5(view_model_file, view_model) as f:
                    ga = f.load_generic_attributes()
                    ga.operation_tag = new_group_name
                    f.store_generic_attributes(ga, False)
            else:
                self.logger.warning("Could not find ViewModel H5 file for op: {}".format(operation))

        # 2. Update GenericAttributes in the associated H5 files:
        h5_path = h5.path_for_stored_index(datatype)
        with H5File.from_file(h5_path) as f:
            ga = f.load_generic_attributes()

            ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
            ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
            ga.operation_tag = new_group_name
            if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
                ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
            if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
                ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
            if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
                ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
            if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
                ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
            if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
                ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

            f.store_generic_attributes(ga, False)

        # 3. Update MetaData in DT Index DB as well.
        datatype.fill_from_generic_attributes(ga)
        dao.store_entity(datatype)
Example 22
    def launch(self, view_model):
        # type: (TVBImporterModel) -> []
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            current_op = dao.get_operation_by_id(self.operation_id)
            if zipfile.is_zipfile(view_model.data_file):
                # Create a new TMP folder where the data will be extracted
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                self.storage_interface.unpack_zip(view_model.data_file,
                                                  tmp_folder)
                is_group = False
                current_op_id = current_op.id
                for file in os.listdir(tmp_folder):
                    # In case we import a DatatypeGroup, we want the default import flow
                    if os.path.isdir(os.path.join(tmp_folder, file)):
                        current_op_id = None
                        is_group = True
                        break
                try:
                    operations, all_dts, stored_dts_count = service.import_project_operations(
                        current_op.project, tmp_folder, is_group,
                        current_op_id)
                    self.nr_of_datatypes += stored_dts_count
                    if stored_dts_count == 0:
                        current_op.additional_info = 'All chosen datatypes already exist!'
                        dao.store_entity(current_op)
                    elif stored_dts_count < all_dts:
                        current_op.additional_info = 'Part of the chosen datatypes already exist!'
                        dao.store_entity(current_op)
                except ImportException as excep:
                    self.log.exception(excep)
                    current_op.additional_info = excep.message
                    current_op.status = STATUS_ERROR
                    raise LaunchException("Invalid file received as input. " +
                                          str(excep))
                finally:
                    shutil.rmtree(tmp_folder)
            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                if self.storage_interface.get_storage_manager(
                        view_model.data_file).is_valid_tvb_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        stored_new_dt = service.store_or_link_datatype(
                            datatype, view_model.data_file,
                            current_op.project.id)
                        if stored_new_dt == 0:
                            current_op.additional_info = 'The chosen datatype already exists!'
                            dao.store_entity(current_op)
                        self.nr_of_datatypes += stored_new_dt
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)