def test_change_filename_or_overwrite_always_overwrite(self):
        """Overwrite mode must delete the pre-existing file at the target path."""
        target = os.path.join(self.config.out.FOLDER_TEMP, "Test.h5")
        self.writer.write_dictionary({"a": [1, 2, 3]}, target)

        # Sanity check: the file really exists before we ask for an overwrite.
        assert os.path.exists(target)

        change_filename_or_overwrite(target, True)

        assert not os.path.exists(target)
# Example #2 (score: 0)
    def write_surface(self, surface, path):
        """
        Persist a Surface object into an H5 file.

        :param surface: Surface object to write in H5
        :param path: H5 path to be written
        """
        target = change_filename_or_overwrite(path)
        h5_file = h5py.File(target, 'a', libver='latest')

        # Geometry arrays, one dataset each.
        for field, data in ((SurfaceH5Field.VERTICES, surface.vertices),
                            (SurfaceH5Field.TRIANGLES, surface.triangles),
                            (SurfaceH5Field.VERTEX_NORMALS,
                             surface.vertex_normals)):
            h5_file.create_dataset(field, data=data)

        # File-level metadata attributes.
        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "Surface")
        h5_file.attrs.create("Surface_subtype", surface.surface_subtype)
        h5_file.attrs.create("Number_of_triangles", surface.triangles.shape[0])
        h5_file.attrs.create("Number_of_vertices", surface.vertices.shape[0])
        # The vox2ras matrix is stored flattened, as space-separated numbers.
        vox2ras_flat = surface.vox2ras.flatten().tolist()
        h5_file.attrs.create("Voxel_to_ras_matrix",
                             str(vox2ras_flat)[1:-1].replace(",", ""))

        self.logger.info("Surface has been written to file: %s" % path)
        h5_file.close()
# Example #3 (score: 0)
    def write_sensors(self, sensors, path):
        """
        Persist a Sensors object into an H5 file.

        :param sensors: Sensors object to write in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        # Per-sensor arrays, one dataset each.
        for field, data in ((SensorsH5Field.LABELS, sensors.labels),
                            (SensorsH5Field.LOCATIONS, sensors.locations),
                            (SensorsH5Field.NEEDLES, sensors.needles)):
            h5_file.create_dataset(field, data=data)

        # The gain matrix carries its own min/max attributes (as strings).
        gain_matrix = sensors.gain_matrix
        gain_dataset = h5_file.create_dataset(SensorsH5Field.GAIN_MATRIX,
                                              data=gain_matrix)
        gain_dataset.attrs.create("Max", str(gain_matrix.max()))
        gain_dataset.attrs.create("Min", str(gain_matrix.min()))

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "Sensors")
        h5_file.attrs.create("Number_of_sensors",
                             str(sensors.number_of_sensors))
        h5_file.attrs.create("Sensors_subtype", sensors.s_type)

        self.logger.info("Sensors have been written to file: %s" % path)
        h5_file.close()
# Example #4 (score: 0)
    def write_dictionary(self, dictionary, path):
        """
        Persist a dictionary into an H5 file: non-empty arrays/lists become
        datasets, everything else becomes a file attribute.

        :param dictionary: dictionary to write in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path),
                            'a',
                            libver='latest')

        # items() (instead of the Py2-only iteritems()) behaves the same on
        # Python 2 and also works on Python 3.
        for key, value in dictionary.items():
            try:
                # Non-empty ndarrays/lists -> datasets; scalars, strings and
                # empty containers -> file attributes (as in the original).
                if (isinstance(value, numpy.ndarray) and value.size > 0) or \
                        (isinstance(value, list) and len(value) > 0):
                    h5_file.create_dataset(key, data=value)
                else:
                    h5_file.attrs.create(key, value)
            except Exception:
                # BUG FIX: narrowed from a bare `except:` — still best-effort
                # per key, but no longer swallows SystemExit/KeyboardInterrupt.
                self.logger.warning("Did not manage to write " + key +
                                    " to h5 file " + path + " !")

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             dictionary.__class__.__name__)

        h5_file.close()
# Example #5 (score: 0)
    def write_model_inversion_service(self, model_inversion_service, path):
        """
        :param model_inversion_service: ModelInversionService object to write in H5
        :param path: H5 path to be written
        """
        # Normalize signals_inds to a numpy array before serialization.
        signals_inds = getattr(model_inversion_service, "signals_inds", None)
        if signals_inds is not None:
            model_inversion_service.signals_inds = numpy.array(signals_inds)

        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        datasets_dict, metadata_dict = \
            self._determine_datasets_and_attributes(model_inversion_service)

        # Array-valued fields become datasets.
        for dataset_name, dataset_value in datasets_dict.iteritems():
            h5_file.create_dataset(dataset_name, data=dataset_value)

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             model_inversion_service.__class__.__name__)

        # Scalar fields become file attributes.
        for attr_name, attr_value in metadata_dict.iteritems():
            h5_file.attrs.create(attr_name, attr_value)

        h5_file.close()
# Example #6 (score: 0)
    def write_hypothesis(self, hypothesis, path):
        """
        :param hypothesis: DiseaseHypothesis object to write in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        # Numeric value arrays are written as datasets.
        for dataset_name, dataset_data in (
                ("x0_values", hypothesis.x0_values),
                ("e_values", hypothesis.e_values),
                ("w_values", hypothesis.w_values),
                ("lsa_propagation_strengths",
                 hypothesis.lsa_propagation_strengths)):
            h5_file.create_dataset(dataset_name, data=dataset_data)

        # TODO: change HypothesisModel to GenericModel here and inside Epi
        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             hypothesis.__class__.__name__)
        # Index and metadata fields are stored as file attributes.
        for attr_name, attr_value in (
                ("number_of_regions", hypothesis.number_of_regions),
                ("type", hypothesis.type),
                ("x0_indices", hypothesis.x0_indices),
                ("e_indices", hypothesis.e_indices),
                ("w_indices", hypothesis.w_indices),
                ("lsa_propagation_indices",
                 hypothesis.lsa_propagation_indices)):
            h5_file.attrs.create(attr_name, attr_value)

        h5_file.close()
# Example #7 (score: 0)
    def write_ts_epi(self, raw_ts, sampling_period, path, source_ts=None):
        """
        Write an epi time series ("/data") plus its source/LFP series
        ("/lfpdata") to an H5 file, with min/max/shape metadata per dataset.

        :param raw_ts: timeseries object whose .squeezed data is 3D
            (time, regions, state variables)
        :param sampling_period: sampling period stored as metadata
        :param source_ts: optional source timeseries; defaults to raw_ts.source
        :param path: H5 path to be written
        """
        # FIX: dropped the pointless single-argument os.path.join(path).
        path = change_filename_or_overwrite(path)

        if raw_ts is None or len(raw_ts.squeezed.shape) != 3:
            raise_value_error(
                "Invalid TS data 3D (time, regions, sv) expected", self.logger)
        self.logger.info("Writing a TS at:\n" + path)
        if source_ts is None:
            source_ts = raw_ts.source
        h5_file = h5py.File(path, 'a', libver='latest')
        h5_file.create_dataset("/data", data=raw_ts.squeezed)
        h5_file.create_dataset("/lfpdata", data=source_ts.squeezed)
        write_metadata({KEY_TYPE: "TimeSeries"}, h5_file, KEY_DATE,
                       KEY_VERSION)
        # Per-dataset metadata: value range, shape and sampling information.
        write_metadata(
            {
                KEY_MAX: raw_ts.squeezed.max(),
                KEY_MIN: raw_ts.squeezed.min(),
                KEY_STEPS: raw_ts.squeezed.shape[0],
                KEY_CHANNELS: raw_ts.squeezed.shape[1],
                KEY_SV: raw_ts.squeezed.shape[2],
                KEY_SAMPLING: sampling_period,
                KEY_START: raw_ts.time_start
            }, h5_file, KEY_DATE, KEY_VERSION, "/data")
        write_metadata(
            {
                KEY_MAX: source_ts.squeezed.max(),
                KEY_MIN: source_ts.squeezed.min(),
                KEY_STEPS: source_ts.squeezed.shape[0],
                KEY_CHANNELS: source_ts.squeezed.shape[1],
                KEY_SV: 1,
                KEY_SAMPLING: sampling_period,
                KEY_START: source_ts.time_start
            }, h5_file, KEY_DATE, KEY_VERSION, "/lfpdata")
        h5_file.close()
# Example #8 (score: 0)
 def write_object_to_file(self,
                          path,
                          object,
                          h5_type_attribute="HypothesisModel",
                          nr_regions=None):
     """Serialize an arbitrary object into a new H5 file at *path*."""
     # NOTE(review): `object` shadows the builtin; kept for caller compatibility.
     target = change_filename_or_overwrite(path)
     h5_file = h5py.File(target, 'a', libver='latest')
     h5_file, _ = self._prepare_object_for_group(h5_file, object,
                                                 h5_type_attribute,
                                                 nr_regions)
     h5_file.close()
# Example #9 (score: 0)
    def write_ts(self, raw_data, sampling_period, path):
        """
        Write a time series to an H5 file.

        :param raw_data: either a dict of 2D float numpy arrays (one dataset
            per key) or a single 2D (time, nodes) float numpy array
        :param sampling_period: sampling period stored as metadata
        :param path: H5 path to be written
        """
        path = change_filename_or_overwrite(path)

        self.logger.info("Writing a TS at:\n" + path)
        h5_file = h5py.File(path, 'a', libver='latest')
        write_metadata({KEY_TYPE: "TimeSeries"}, h5_file, KEY_DATE,
                       KEY_VERSION)

        def _write_2d(name, data):
            # One dataset plus its per-dataset metadata block.
            h5_file.create_dataset("/" + name, data=data)
            write_metadata(
                {
                    KEY_MAX: data.max(),
                    KEY_MIN: data.min(),
                    KEY_STEPS: data.shape[0],
                    KEY_CHANNELS: data.shape[1],
                    KEY_SV: 1,
                    KEY_SAMPLING: sampling_period,
                    KEY_START: 0.0
                }, h5_file, KEY_DATE, KEY_VERSION, "/" + name)

        if isinstance(raw_data, dict):
            for data in raw_data:
                if len(raw_data[data].shape) == 2 and str(
                        raw_data[data].dtype)[0] == "f":
                    _write_2d(data, raw_data[data])
                else:
                    raise_value_error(
                        "Invalid TS data. 2D (time, nodes) numpy.ndarray of floats expected"
                    )
        elif isinstance(raw_data, numpy.ndarray):
            # BUG FIX: the original condition was inverted (`!= 2 and != "f"`),
            # so valid 2D float arrays raised and invalid data was written.
            # Mirror the dict-branch validation instead.
            if len(raw_data.shape) == 2 and str(raw_data.dtype)[0] == "f":
                _write_2d("data", raw_data)
            else:
                raise_value_error(
                    "Invalid TS data. 2D (time, nodes) numpy.ndarray of floats expected"
                )
        else:
            raise_value_error(
                "Invalid TS data. Dictionary or 2D (time, nodes) numpy.ndarray of floats expected"
            )
        h5_file.close()
# Example #10 (score: 0)
    def write_ts_epi(self, raw_data, sampling_period, path, lfp_data=None):
        """
        Write a 3D epi time series ("/data") and a derived LFP series
        ("/lfpdata") to an H5 file, with min/max/shape metadata per dataset.

        :param raw_data: 3D numpy array (time, regions, state variables)
        :param sampling_period: sampling period stored as metadata
        :param path: H5 path to be written
        :param lfp_data: selector for the LFP series: an int, a list of two
            state-variable indices into raw_data, or a precomputed 2D array.
            NOTE(review): the default None falls into the error branch below,
            so the argument is effectively required — confirm intent.
        """
        # os.path.join with a single argument is a no-op here.
        path = change_filename_or_overwrite(os.path.join(path))

        if raw_data is None or len(raw_data.shape) != 3:
            raise_value_error(
                "Invalid TS data 3D (time, regions, sv) expected", self.logger)
        self.logger.info("Writing a TS at:\n" + path)
        if type(lfp_data) == int:
            # NOTE(review): this branch looks broken as written — an int is
            # not subscriptable (lfp_data[1]), and assigning [] into a 3D
            # slice raises ValueError. Presumably it meant to extract state
            # variable index `lfp_data`; confirm before relying on it.
            lfp_data = raw_data[:, :, lfp_data[1]]
            raw_data[:, :, lfp_data[1]] = []
        elif isinstance(lfp_data, list):
            # LFP as the difference of two selected state variables.
            lfp_data = raw_data[:, :, lfp_data[1]] - raw_data[:, :,
                                                              lfp_data[0]]
        elif isinstance(lfp_data, numpy.ndarray):
            # Reshape a precomputed 2D LFP array to (time, regions, 1).
            lfp_data = lfp_data.reshape(
                (lfp_data.shape[0], lfp_data.shape[1], 1))
        else:
            raise_value_error(
                "Invalid lfp_data 3D (time, regions, sv) expected",
                self.logger)
        h5_file = h5py.File(path, 'a', libver='latest')
        h5_file.create_dataset("/data", data=raw_data)
        h5_file.create_dataset("/lfpdata", data=lfp_data)
        write_metadata({KEY_TYPE: "TimeSeries"}, h5_file, KEY_DATE,
                       KEY_VERSION)
        # Per-dataset metadata: value range, shape and sampling information.
        write_metadata(
            {
                KEY_MAX: raw_data.max(),
                KEY_MIN: raw_data.min(),
                KEY_STEPS: raw_data.shape[0],
                KEY_CHANNELS: raw_data.shape[1],
                KEY_SV: raw_data.shape[2],
                KEY_SAMPLING: sampling_period,
                KEY_START: 0.0
            }, h5_file, KEY_DATE, KEY_VERSION, "/data")
        write_metadata(
            {
                KEY_MAX: lfp_data.max(),
                KEY_MIN: lfp_data.min(),
                KEY_STEPS: lfp_data.shape[0],
                KEY_CHANNELS: lfp_data.shape[1],
                KEY_SV: 1,
                KEY_SAMPLING: sampling_period,
                KEY_START: 0.0
            }, h5_file, KEY_DATE, KEY_VERSION, "/lfpdata")
        h5_file.close()
# Example #11 (score: 0)
 def write_to_h5(self, path):
     """
     Store H5Model object to a hdf5 file.

     :param path: target file path; adjusted by change_filename_or_overwrite
         before writing
     """
     final_path = change_filename_or_overwrite(path)
     logger.info("Writing %s at: %s" % (self, final_path))
     h5_file = h5py.File(final_path, 'a', libver='latest')
     for attribute, field in self.datasets_dict.iteritems():
         h5_file.create_dataset(attribute, data=field)
     for meta, val in self.metadata_dict.iteritems():
         # Metadata keys are "dataset/attr" paths; an empty dataset part
         # means the attribute belongs on the file root.
         dataset_path, attribute_name = os.path.split(meta)
         if dataset_path == "":
             h5_file.attrs.create(attribute_name, val)
         else:
             try:
                 h5_file[dataset_path].attrs.create(attribute_name, val)
             except Exception:
                 # BUG FIX: replaced bare `except: print("WTF")` with a
                 # narrowed handler and an informative log message.
                 logger.warning("Failed to set attribute %s on dataset %s" %
                                (attribute_name, dataset_path))
     h5_file.close()
# Example #12 (score: 0)
    def write_simulation_settings(self, simulation_settings, path):
        """
        :param simulation_settings: SimulationSettings object to write in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        datasets_dict, metadata_dict = \
            self._determine_datasets_and_attributes(simulation_settings)

        # Array-valued fields become datasets...
        for dataset_name, dataset_value in datasets_dict.iteritems():
            h5_file.create_dataset(dataset_name, data=dataset_value)

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             simulation_settings.__class__.__name__)

        # ...and scalar fields become file attributes.
        for attr_name, attr_value in metadata_dict.iteritems():
            h5_file.attrs.create(attr_name, attr_value)

        h5_file.close()
# Example #13 (score: 0)
    def write_lsa_service(self, lsa_service, path):
        """
        :param lsa_service: LSAService object to write in H5
        :param path: H5 path to be written
        """
        out_path = change_filename_or_overwrite(path)
        h5_file = h5py.File(out_path, 'a', libver='latest')

        datasets_dict, metadata_dict = \
            self._determine_datasets_and_attributes(lsa_service)

        # Write array-valued fields as datasets.
        for key, data in datasets_dict.iteritems():
            h5_file.create_dataset(key, data=data)

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             lsa_service.__class__.__name__)

        # Remaining scalar fields are stored as file attributes.
        for key, val in metadata_dict.iteritems():
            h5_file.attrs.create(key, val)

        h5_file.close()
# Example #14 (score: 0)
    def write_model_configuration(self, model_configuration, path):
        """
        :param model_configuration: ModelConfiguration object to write in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        datasets_dict, metadata_dict = \
            self._determine_datasets_and_attributes(model_configuration)

        # Datasets first, from the array-valued fields.
        for dataset_key, dataset_data in datasets_dict.iteritems():
            h5_file.create_dataset(dataset_key, data=dataset_data)

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             model_configuration.__class__.__name__)

        # Then the scalar fields, as file attributes.
        for meta_key, meta_value in metadata_dict.iteritems():
            h5_file.attrs.create(meta_key, meta_value)

        h5_file.close()
# Example #15 (score: 0)
    def write_connectivity(self, connectivity, path):
        """
        :param connectivity: Connectivity object to be written in H5
        :param path: H5 path to be written
        """
        h5_file = h5py.File(change_filename_or_overwrite(path), 'a',
                            libver='latest')

        # Core connectivity arrays, one dataset each.
        for field_name, field_data in (
                (ConnectivityH5Field.WEIGHTS, connectivity.weights),
                (ConnectivityH5Field.TRACTS, connectivity.tract_lengths),
                (ConnectivityH5Field.CENTERS, connectivity.centres),
                (ConnectivityH5Field.REGION_LABELS,
                 connectivity.region_labels),
                (ConnectivityH5Field.ORIENTATIONS, connectivity.orientations),
                (ConnectivityH5Field.HEMISPHERES, connectivity.hemispheres)):
            h5_file.create_dataset(field_name, data=field_data)

        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "Connectivity")
        h5_file.attrs.create("Number_of_regions",
                             str(connectivity.number_of_regions))

        # Normalized weights (if present) go into their own group, tagged
        # with the normalization recipe that produced them.
        if connectivity.normalized_weights.size > 0:
            dataset = h5_file.create_dataset(
                "normalized_weights/" + ConnectivityH5Field.WEIGHTS,
                data=connectivity.normalized_weights)
            dataset.attrs.create(
                "Operations",
                "Removing diagonal, normalizing with 95th percentile, and ceiling to it"
            )

        self.logger.info("Connectivity has been written to file: %s" % path)
        h5_file.close()
# Example #16 (score: 0)
    def write_probabilistic_model(self, probabilistic_model, nr_regions, path):
        """
        Write a probabilistic model (including nested parameter objects)
        into an H5 file.

        :param probabilistic_model: probabilistic model object to serialize
        :param nr_regions: number of regions, forwarded to
            _prepare_object_for_group
        :param path: H5 path to be written
        """
        def _set_parameter_to_group(parent_group,
                                    parameter,
                                    nr_regions,
                                    param_name=None):
            # Create a subgroup for this parameter (named after it by
            # default) and recursively serialize its fields.
            if param_name is None:
                this_param_group = parent_group.create_group(parameter.name)
            else:
                this_param_group = parent_group.create_group(param_name)
            this_param_group, parameter_subgroups = \
                self._prepare_object_for_group(this_param_group, parameter, nr_regions=nr_regions)
            for param_subgroup_key in parameter_subgroups:
                if param_subgroup_key.find("p_shape") >= 0:
                    # BUG FIX: read the shape from `parameter` (this helper's
                    # argument) instead of the outer loop's `param_value`.
                    this_param_group[param_subgroup_key] = numpy.array(
                        getattr(parameter, param_subgroup_key))
                elif param_subgroup_key == "star":
                    # Recurse into the "star" sub-parameter.
                    this_param_group, parameter_subgroup = \
                        _set_parameter_to_group(this_param_group, parameter.star, nr_regions, "star")
                else:
                    # BUG FIX: create the subgroup under `this_param_group`
                    # and read the field from `parameter`; the original
                    # referenced an undefined `param_group` (NameError) and
                    # the outer `param_value` on this branch.
                    parameter_subgroup = this_param_group.create_group(
                        param_subgroup_key)
                    parameter_subgroup, _ = self._prepare_object_for_group(
                        parameter_subgroup,
                        getattr(parameter, param_subgroup_key), nr_regions)
            return parent_group, this_param_group

        h5_file = h5py.File(change_filename_or_overwrite(path),
                            'a',
                            libver='latest')

        datasets_dict, metadata_dict, groups_keys = self._determine_datasets_and_attributes(
            probabilistic_model, nr_regions)
        h5_file.attrs.create(self.H5_TYPE_ATTRIBUTE, "HypothesisModel")
        h5_file.attrs.create(self.H5_SUBTYPE_ATTRIBUTE,
                             probabilistic_model.__class__.__name__)

        self._write_dicts_at_location(datasets_dict, metadata_dict, h5_file)

        for group_key in groups_keys:
            if group_key == "active_regions":
                h5_file.create_dataset(group_key,
                                       data=numpy.array(
                                           probabilistic_model.active_regions))

            elif group_key == "parameters":
                # Each parameter gets its own subgroup under "parameters".
                group = h5_file.create_group(group_key)
                group.attrs.create(
                    self.H5_SUBTYPE_ATTRIBUTE,
                    probabilistic_model.parameters.__class__.__name__)
                for param_key, param_value in probabilistic_model.parameters.iteritems(
                ):
                    group, param_group = _set_parameter_to_group(
                        group, param_value, nr_regions, param_key)

            else:
                # Any other grouped attribute is serialized generically.
                group = h5_file.create_group(group_key)
                group.attrs.create(
                    self.H5_SUBTYPE_ATTRIBUTE,
                    getattr(probabilistic_model, group_key).__class__.__name__)
                group, _ = self._prepare_object_for_group(
                    group, getattr(probabilistic_model, group_key), nr_regions)

        h5_file.close()