Example #1
class FRETTest(unittest.TestCase):
    def setUp(self):
        self.nwbfile = NWBFile('description', 'id',
                               datetime.now().astimezone())

    def test_add_fretseries(self):
        # Create and add device
        device = Device(name='Device')
        self.nwbfile.add_device(device)

        # Create optical channels
        opt_ch_d = OpticalChannel(name='optical_channel',
                                  description='optical_channel_description',
                                  emission_lambda=529.)
        opt_ch_a = OpticalChannel(name='optical_channel',
                                  description='optical_channel_description',
                                  emission_lambda=633.)

        # Create FRET
        fs_d = FRETSeries(name='donor',
                          fluorophore='mCitrine',
                          optical_channel=opt_ch_d,
                          device=device,
                          description='description of donor series',
                          data=np.random.randn(100, 10, 10),
                          rate=200.,
                          unit='no unit')
        fs_a = FRETSeries(name='acceptor',
                          fluorophore='mKate2',
                          optical_channel=opt_ch_a,
                          device=device,
                          description='description of acceptor series',
                          data=np.random.randn(100, 10, 10),
                          rate=200.,
                          unit='no unit')

        fret = FRET(name='FRET',
                    excitation_lambda=482.,
                    donor=fs_d,
                    acceptor=fs_a)
        self.nwbfile.add_acquisition(fret)

        filename = 'test_fret.nwb'

        with NWBHDF5IO(filename, 'w') as io:
            io.write(self.nwbfile)

        with NWBHDF5IO(filename, mode='r', load_namespaces=True) as io:
            io.read()

        os.remove(filename)
Example #2
    def test_nwbfileio(self):
        testdir = tempfile.mkdtemp()
        nwbfile = NWBFile(**temp_session_nwbfile)
        nwbfile.add_lab_meta_data(IblSessionData(**temp_sessions))
        nwbfile.subject = IblSubject(**temp_subject)
        for i, name in zip(temp_probes, probe_names):
            nwbfile.add_device(IblProbes(name, **i))
        saveloc = os.path.join(testdir, 'test.nwb')
        with NWBHDF5IO(saveloc, mode='w') as io:
            io.write(nwbfile)

        with NWBHDF5IO(saveloc, mode='r', load_namespaces=True) as io:
            read_nwbfile = io.read()
            for i, j in temp_sessions.items():
                attr_loop = getattr(read_nwbfile.lab_meta_data['Ibl_session_data'], i, None)
                if attr_loop:
                    if isinstance(attr_loop, h5py.Dataset):
                        assert all(attr_loop[()] == j)
                    else:
                        assert attr_loop == j
            for i, j in temp_subject.items():
                attr_loop = getattr(read_nwbfile.subject, i, None)
                if attr_loop:
                    if isinstance(attr_loop, h5py.Dataset):
                        assert all(attr_loop[()] == j)
                    else:
                        assert attr_loop == j

            for no, probe_name in enumerate(probe_names):
                for i, j in temp_probes[no].items():
                    attr_loop = getattr(read_nwbfile.devices[probe_name], i, None)
                    if attr_loop:
                        if isinstance(attr_loop, h5py.Dataset):
                            assert all(attr_loop[()] == j)
                        else:
                            assert attr_loop == j

        shutil.rmtree(testdir)
Example #3
class Preprocessing:
    def load_yaml(self):
        # If a YAML metadata file exists, load all of its data into a dict
        if os.path.isfile("data.yaml"):
            with open("data.yaml", 'r') as stream:
                self.data = yaml.safe_load(stream)
        # Same but with .yml extension
        elif os.path.isfile("data.yml"):
            with open("data.yml", 'r') as stream:
                self.data = yaml.safe_load(stream)
        else:
            self.data = dict()
        if self.data is None:
            self.data = dict()
        # Dump the dict back into the YAML file to save the user's input for future use
        with open('data.yaml', 'w') as outfile:
            yaml.dump(self.data,
                      outfile,
                      default_flow_style=False,
                      allow_unicode=True)

    def add_required_metadata(self, data, key, metadata_type):
        # Prompt the user for required metadata
        # The value must be typed as a Python literal (e.g. "..." for strings,
        # datetime.datetime(year, month, day) for datetimes)
        print("Missing required " + metadata_type + " metadata : " + key +
              "\n")
        metadata_value = input("Type the value (respecting its Python type) : ")
        # Note: eval executes arbitrary user input; only use with trusted input
        data[key] = eval(metadata_value)
        # Dump the dict back into the YAML file to save the user's input for future use
        with open('data.yaml', 'w') as outfile:
            yaml.dump(self.data,
                      outfile,
                      default_flow_style=False,
                      allow_unicode=True)
        return data

    def add_optional_metadata(self, data):
        # Allow the user to add as much metadata as they want, under any key.
        # Users should refer to the documentation to fill existing attributes,
        # but they can also create new ones and use them in their own code.
        keyboard_input = False
        while not keyboard_input:
            # Prompt the user for optional metadata
            # The value must be typed as a Python literal (e.g. "..." for strings,
            # datetime.datetime(year, month, day) for datetimes)
            metadata_key = input(
                "Type the name of the metadata you want to add : ")
            # Normalize the key: lowercase with spaces replaced by underscores
            metadata_key = metadata_key.strip().lower().replace(" ", "_")
            metadata_value = input(
                "Type the value (respecting its Python type) : ")
            # Note: eval executes arbitrary user input; only use with trusted input
            data[metadata_key] = eval(metadata_value)
            go = input("Continue ? (yes/no)")
            # Prevent errors on any input other than yes/no
            while go.replace(" ", "").lower() != "no" and go.replace(
                    " ", "").lower() != "yes":
                go = input("Continue ? (yes/no)")
            if go.replace(" ", "").lower() == "no":
                keyboard_input = True
            # Dump the dict back into the YAML file to save the user's input for future use
            with open('data.yaml', 'w') as outfile:
                yaml.dump(self.data,
                          outfile,
                          default_flow_style=False,
                          allow_unicode=True)
        return data

    def ophys_metadata_acquisition(self):

        # TODO: Maybe use a dict of sub-dicts, one per class to fill in.
        #       Pros: more readable YAML and simple handling of name ambiguities
        #       while keeping the NWB naming.
        #       Cons: harder for the user to enter as input (we would have to ask
        #       which class each value belongs to).

        # List of the required metadata

        # TODO: a better implementation may be possible

        required_metadata = [
            "session_description", "identifier", "session_start_time"
        ]

        if self.data.get('ophys_metadata') is None:
            self.data['ophys_metadata'] = dict()
            for i in required_metadata:
                self.data["ophys_metadata"] = self.add_required_metadata(
                    self.data["ophys_metadata"], i, "ophys")
        else:
            # Ask the user for any required attributes missing from the YAML file
            metadata_to_add = list(
                set(required_metadata) -
                set(list(self.data['ophys_metadata'].keys())))
            for i in metadata_to_add:
                self.data["ophys_metadata"] = self.add_required_metadata(
                    self.data["ophys_metadata"], i, "ophys")

        print("Found ophys metadata : " +
              str(list(self.data['ophys_metadata'].keys())))
        add_metadata = input(
            "Do you want to add more ophys metadata ? (yes/no) ")
        # Prevent errors on any input other than yes/no
        while add_metadata.replace(
                " ", "").lower() != "no" and add_metadata.replace(
                    " ", "").lower() != "yes":
            add_metadata = input(
                "Do you want to add more ophys metadata ? (yes/no) ")
        if add_metadata.replace(" ", "").lower() == "yes":
            self.data["ophys_metadata"] = self.add_optional_metadata(
                self.data["ophys_metadata"])

        # Create new NWB file with all known attributes
        self.nwbfile = NWBFile(
            session_description=self.data['ophys_metadata'].get(
                "session_description"),
            identifier=self.data['ophys_metadata'].get("identifier"),
            session_start_time=self.data['ophys_metadata'].get(
                "session_start_time"),
            file_create_date=self.data['ophys_metadata'].get(
                "file_create_date"),
            timestamps_reference_time=self.data['ophys_metadata'].get(
                "timestamps_reference_time"),
            experimenter=self.data['ophys_metadata'].get("experimenter"),
            experiment_description=self.data['ophys_metadata'].get(
                "experiment_description"),
            session_id=self.data['ophys_metadata'].get("session_id"),
            institution=self.data['ophys_metadata'].get("institution"),
            keywords=self.data['ophys_metadata'].get("keywords"),
            notes=self.data['ophys_metadata'].get("notes"),
            pharmacology=self.data['ophys_metadata'].get("pharmacology"),
            protocol=self.data['ophys_metadata'].get("protocol"),
            related_publications=self.data['ophys_metadata'].get(
                "related_publications"),
            slices=self.data['ophys_metadata'].get("slices"),
            source_script=self.data['ophys_metadata'].get("source_script"),
            source_script_file_name=self.data['ophys_metadata'].get(
                "source_script_file_name"),
            data_collection=self.data['ophys_metadata'].get(
                "data_collection"),
            surgery=self.data['ophys_metadata'].get("surgery"),
            virus=self.data['ophys_metadata'].get("virus"),
            stimulus_notes=self.data['ophys_metadata'].get("stimulus_notes"),
            lab=self.data['ophys_metadata'].get("lab"),
            subject=self.subject)

    def subject_metadata_acquisition(self):
        # Check whether subject metadata exists and ask the user whether to add more
        if self.data.get('subject_metadata') is None:
            print("No subject metadata found \n ")
            self.data['subject_metadata'] = dict()
        elif len(self.data['subject_metadata']) == 0:
            print("No subject metadata found \n ")
        else:
            print("Found subject metadata : " +
                  str(list(self.data['subject_metadata'].keys())))
        add_metadata = input(
            "Do you want to add more subject metadata ? (yes/no) ")
        # Prevent errors on any input other than yes/no
        while add_metadata.replace(
                " ", "").lower() != "no" and add_metadata.replace(
                    " ", "").lower() != "yes":
            add_metadata = input(
                "Do you want to add more subject metadata ? (yes/no) ")
        if add_metadata.replace(" ", "").lower() == "yes":
            self.data['subject_metadata'] = self.add_optional_metadata(
                self.data['subject_metadata'])

        self.subject = Subject(
            age=self.data['subject_metadata'].get("age"),
            description=self.data['subject_metadata'].get("description"),
            genotype=self.data['subject_metadata'].get("genotype"),
            sex=self.data['subject_metadata'].get("sex"),
            species=self.data['subject_metadata'].get("species"),
            subject_id=self.data['subject_metadata'].get("subject_id"),
            weight=self.data['subject_metadata'].get("weight"),
            date_of_birth=self.data['subject_metadata'].get("date_of_birth"))

    def cicada_create_device(self):
        """
        class pynwb.device.Device(name, parent=None)
        """
        required_metadata = ["device_name"]

        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        self.device = Device(
            name=self.data['ophys_metadata'].get("device_name"))

        self.nwbfile.add_device(self.device)

    def cicada_create_optical_channel(self):
        required_metadata = [
            "optical_channel_name", "optical_channel_description",
            "optical_channel_emission_lambda"
        ]

        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        self.optical_channel = OpticalChannel(
            name=self.data['ophys_metadata'].get("optical_channel_name"),
            description=self.data['ophys_metadata'].get(
                "optical_channel_description"),
            emission_lambda=self.data['ophys_metadata'].get(
                "optical_channel_emission_lambda"))

    def cicada_create_module(self):

        required_metadata = [
            "processing_module_name", "processing_module_description"
        ]

        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        self.mod = self.nwbfile.create_processing_module(
            name=self.data['ophys_metadata'].get("processing_module_name"),
            description=self.data['ophys_metadata'].get(
                "processing_module_description"))

    def cicada_create_imaging_plane(self):
        """ class pynwb.ophys.ImagingPlane(name, optical_channel, description, device, excitation_lambda,
            imaging_rate, indicator, location,
            manifold=None, conversion=None, unit=None, reference_frame=None, parent=None)
        """

        required_metadata = [
            "imaging_plane_name", "imaging_plane_description",
            "imaging_plane_excitation_lambda", "imaging_plane_imaging_rate",
            "imaging_plane_indicator", "imaging_plane_location"
        ]
        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")
        # Prefix of the metadata keys for this module
        name_module = "imaging_plane_"

        self.imaging_plane = self.nwbfile.create_imaging_plane(
            name=self.data['ophys_metadata'].get(name_module + "name"),
            optical_channel=self.optical_channel,
            description=self.data['ophys_metadata'].get(name_module +
                                                        "description"),
            device=self.device,
            excitation_lambda=self.data['ophys_metadata'].get(
                name_module + "excitation_lambda"),
            imaging_rate=self.data['ophys_metadata'].get(name_module +
                                                         "imaging_rate"),
            indicator=self.data['ophys_metadata'].get(name_module +
                                                      "indicator"),
            location=self.data['ophys_metadata'].get(name_module + "location"),
            manifold=self.data['ophys_metadata'].get(name_module + "manifold"),
            conversion=self.data['ophys_metadata'].get(name_module +
                                                       "conversion"),
            unit=self.data['ophys_metadata'].get(name_module + "unit"),
            reference_frame=self.data['ophys_metadata'].get(name_module +
                                                            "reference_frame"))

    def cicada_create_two_photon_series(self,
                                        data_to_store=None,
                                        external_file=None):
        """ class pynwb.ophys.TwoPhotonSeries(name, imaging_plane,
            data=None, unit=None, format=None, field_of_view=None, pmt_gain=None, scan_line_rate=None,
            external_file=None, starting_frame=None, bits_per_pixel=None, dimension=[nan], resolution=0.0,
            conversion=1.0, timestamps=None, starting_time=None, rate=None, comments='no comments',
            description='no description', control=None, control_description=None, parent=None)
        """

        required_metadata = ["two_photon_name"]
        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")
        # Prefix of the metadata keys for this module
        name_module = "two_photon_"

        self.movie_two_photon = TwoPhotonSeries(
            name=self.data['ophys_metadata'].get(name_module + "name"),
            imaging_plane=self.imaging_plane,
            data=data_to_store,
            unit=self.data['ophys_metadata'].get(name_module + "unit"),
            format=self.data['ophys_metadata'].get(name_module + "format"),
            field_of_view=self.data['ophys_metadata'].get(name_module +
                                                          "field_of_view"),
            pmt_gain=self.data['ophys_metadata'].get(name_module + "pmt_gain"),
            scan_line_rate=self.data['ophys_metadata'].get(name_module +
                                                           "scan_line_rate"),
            external_file=external_file,
            starting_frame=self.data['ophys_metadata'].get(name_module +
                                                           "starting_frame"),
            bits_per_pixel=self.data['ophys_metadata'].get(name_module +
                                                           "bits_per_pixel"),
            dimension=data_to_store.shape[1:],
            resolution=0.0,
            conversion=1.0,
            timestamps=self.data['ophys_metadata'].get(name_module +
                                                       "timestamps"),
            starting_time=self.data['ophys_metadata'].get(name_module +
                                                          "starting_time"),
            rate=1.0,
            comments="no comments",
            description="no description",
            control=self.data['ophys_metadata'].get(name_module + "control"),
            control_description=self.data['ophys_metadata'].get(
                name_module + "control_description"),
            parent=self.data['ophys_metadata'].get(name_module + "parent"))

        self.nwbfile.add_acquisition(self.movie_two_photon)

    def cicada_create_motion_correction(self):
        """  class pynwb.ophys.MotionCorrection(corrected_images_stacks={}, name='MotionCorrection')
        """

        # Prefix of the metadata keys for this module
        name_module = "motion_correction_"

        corrected_images_stacks = {}

        self.motion_correction = MotionCorrection(
            corrected_images_stacks=corrected_images_stacks,
            name="MotionCorrection")

        self.mod.add_data_interface(self.motion_correction)

    def cicada_add_corrected_image_stack(self,
                                         corrected=None,
                                         original=None,
                                         xy_translation=None):
        """ class pynwb.ophys.CorrectedImageStack(corrected, original, xy_translation, name='CorrectedImageStack')
        """

        # Prefix of the metadata keys for this module
        name_module = "corrected_image_stack_"

        self.corrected_image_stack = CorrectedImageStack(
            corrected=corrected,
            original=original,
            xy_translation=xy_translation,
            name="CorrectedImageStack")

        self.motion_correction.add_corrected_image_stack(
            self.corrected_image_stack)

    def cicada_add_plane_segmentation(self):
        """ class pynwb.ophys.PlaneSegmentation(description, imaging_plane,
            name=None, reference_images=None, id=None, columns=None, colnames=None)
        """

        required_metadata = ["plane_segmentation_description"]
        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        # Prefix of the metadata keys for this module
        name_module = "plane_segmentation_"

        self.plane_segmentation = PlaneSegmentation(
            description=self.data['ophys_metadata'].get(name_module +
                                                        "description"),
            imaging_plane=self.imaging_plane,
            name=self.data['ophys_metadata'].get(name_module + "name"),
            reference_images=self.data['ophys_metadata'].get(
                name_module + "reference_image"),
            id=self.data['ophys_metadata'].get(name_module + "id"),
            columns=self.data['ophys_metadata'].get(name_module + "columns"),
            colnames=self.data['ophys_metadata'].get(name_module + "colnames"))

        self.image_segmentation.add_plane_segmentation(self.plane_segmentation)

    def cicada_add_roi_in_plane_segmentation(self,
                                             pixel_mask=None,
                                             voxel_mask=None,
                                             image_mask=None,
                                             id_roi=None):
        """add_roi(pixel_mask=None, voxel_mask=None, image_mask=None, id=None)
        """

        self.plane_segmentation.add_roi(pixel_mask=pixel_mask,
                                        voxel_mask=voxel_mask,
                                        image_mask=image_mask,
                                        id=id_roi)

    def cicada_create_roi_table_region_in_plane_segmentation(
        self, region=slice(None, None, None)):
        """create_roi_table_region(description, region=slice(None, None, None), name='rois')"""

        required_metadata = ["roi_table_region_description"]
        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        # Prefix of the metadata keys for this module
        name_module = "roi_table_region_"

        self.table_region = self.plane_segmentation.create_roi_table_region(
            description=self.data['ophys_metadata'].get(name_module +
                                                        "description"),
            region=region,
            name="rois")

    def cicada_create_image_segmentation(self):
        """  class pynwb.ophys.ImageSegmentation(plane_segmentations={}, name='ImageSegmentation')
        """

        # Prefix of the metadata keys for this module
        name_module = "image_segmentation_"

        plane_segmentations = {}

        self.image_segmentation = ImageSegmentation(
            plane_segmentations=plane_segmentations, name="ImageSegmentation")

        self.mod.add_data_interface(self.image_segmentation)

    def cicada_create_fluorescence(self):
        """   class pynwb.ophys.Fluorescence(roi_response_series={}, name='Fluorescence')
        """

        # Prefix of the metadata keys for this module
        name_module = "fluorescence_"

        roi_response_series = {}

        self.fluorescence = Fluorescence(
            roi_response_series=roi_response_series, name="Fluorescence")

        self.mod.add_data_interface(self.fluorescence)

    def cicada_create_DfOverF(self):
        """   class pynwb.ophys.DfOverF(roi_response_series={}, name='DfOverF')
        """

        # Prefix of the metadata keys for this module
        name_module = "DfOverF_"

        roi_response_series = {}

        self.DfOverF = DfOverF(roi_response_series=roi_response_series,
                               name="DfOverF")

        self.mod.add_data_interface(self.DfOverF)

    def cicada_add_roi_response_series(self,
                                       module,
                                       traces_data=None,
                                       rois=None):
        """  class pynwb.ophys.RoiResponseSeries(name, data, unit, rois,
             resolution=0.0, conversion=1.0, timestamps=None, starting_time=None, rate=None, comments='no comments',
             description='no description', control=None, control_description=None, parent=None)
        """

        required_metadata = [
            "roi_response_series_name", "roi_response_series_unit"
        ]
        metadata_to_add = list(
            set(required_metadata) -
            set(list(self.data['ophys_metadata'].keys())))
        for i in metadata_to_add:
            self.data["ophys_metadata"] = self.add_required_metadata(
                self.data["ophys_metadata"], i, "ophys")

        # Prefix of the metadata keys for this module
        name_module = "roi_response_series_"

        roi_response_series = RoiResponseSeries(
            name=self.data['ophys_metadata'].get(name_module + "name"),
            data=traces_data,
            unit=self.data['ophys_metadata'].get(name_module + "unit"),
            rois=self.table_region,
            resolution=0.0,
            conversion=1.0,
            timestamps=self.data['ophys_metadata'].get(name_module +
                                                       "timestamp"),
            starting_time=self.data['ophys_metadata'].get(name_module +
                                                          "starting_time"),
            rate=1.0,
            comments="no comments",
            description="no description",
            control=self.data['ophys_metadata'].get(name_module + "control"),
            control_description=self.data['ophys_metadata'].get(
                name_module + "control_description"),
            parent=self.data['ophys_metadata'].get(name_module + "parent"))

        if module == "DfOverF":
            self.DfOverF.add_roi_response_series(roi_response_series)
        elif module == "fluorescence":
            self.fluorescence.add_roi_response_series(roi_response_series)
        else:
            print(
                f"error: module name must be 'DfOverF' or 'fluorescence', not {module}!"
            )

    def find_roi(self):

        # Path to the suite2p output folder
        data_path = "C:/Users/François/Documents/dossier François/Stage INMED/" \
                    "Programmes/Godly Ultimate Interface/NWB/exp2nwb-master/src/suite2p"
        self.suite2p_data = dict()

        # Load the suite2p output files (F, Fneu, spks, stat, iscell)
        f = np.load(data_path + "/F.npy", allow_pickle=True)
        self.suite2p_data["F"] = f
        f_neu = np.load(data_path + "/Fneu.npy", allow_pickle=True)
        self.suite2p_data["Fneu"] = f_neu
        spks = np.load(data_path + "/spks.npy", allow_pickle=True)
        self.suite2p_data["spks"] = spks
        stat = np.load(data_path + "/stat.npy", allow_pickle=True)
        self.suite2p_data["stat"] = stat
        is_cell = np.load(data_path + "/iscell.npy", allow_pickle=True)
        self.suite2p_data["is_cell"] = is_cell

        # Find the coordinates of each ROI (here, each cell)
        coord = []
        for cell in np.arange(len(stat)):
            if is_cell[cell][0] == 0:
                continue
            print(is_cell[cell][0])
            list_points_coord = [
                (x, y, 1)
                for x, y in zip(stat[cell]["xpix"], stat[cell]["ypix"])
            ]
            # coord.append(np.array(list_points_coord).transpose())

            # The block below would keep only the contours (without the interior pixels)
            """
            # WARNING! Requires: from shapely.geometry import MultiPoint, LineString
            convex_hull = MultiPoint(list_points_coord).convex_hull
            if isinstance(convex_hull, LineString):
                coord_shapely = MultiPoint(list_points_coord).convex_hull.coords
            else:
                coord_shapely = MultiPoint(list_points_coord).convex_hull.exterior.coords
            coord.append(np.array(coord_shapely).transpose())
            """

        # List of the pixels included in each ROI
        self.suite2p_data["coord"] = coord
Example #4
    experimenter='Dr. Bilbo Baggins',
    lab='Bag End Laboratory',
    institution='University of Middle Earth at the Shire',
    experiment_description=('I went on an adventure with thirteen '
                            'dwarves to reclaim vast treasures.'),
    session_id='LONELYMTN')

####################
# Adding metadata about acquisition
# ---------------------------------
#
# Before you can add your data, you will need to provide some information about how that data was generated.
# This amounts to describing the device, imaging plane, and optical channel used.

device = Device('imaging_device_1')
nwbfile.add_device(device)
optical_channel = OpticalChannel('my_optchan', 'description', 500.)
imaging_plane = nwbfile.create_imaging_plane(
    'my_imgpln', optical_channel, 'a very interesting part of the brain',
    device, 600., 300., 'GFP', 'my favorite brain location', np.ones(
        (5, 5, 3)), 4.0, 'manifold unit', 'A frame to refer to')

####################
# Adding two-photon image data
# ----------------------------
#
# Now that you have your :py:class:`~pynwb.ophys.ImagingPlane`, you can create a
# :py:class:`~pynwb.ophys.TwoPhotonSeries` - the class representing two photon imaging data.
#
# From here you have two options. The first option is to supply the image data to PyNWB, using the `data` argument.
# The other option is to provide a path to the images. These two options have trade-offs, so it is worth spending
# time considering how you want to store this data.
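
####################
# A minimal sketch of both options. This block is not part of the original
# tutorial: ``TwoPhotonSeries`` is assumed to be importable from pynwb.ophys,
# and the data, rate, unit, and file name below are illustrative placeholders.

from pynwb.ophys import TwoPhotonSeries

# Option 1: store the pixel data directly inside the NWB file via `data`
image_data = np.random.rand(100, 5, 5)  # (frames, height, width), made-up values
internal_tps = TwoPhotonSeries(name='internal_two_photon_series',
                               imaging_plane=imaging_plane,
                               data=image_data,
                               rate=30.0,
                               unit='normalized amplitude')
nwbfile.add_acquisition(internal_tps)

# Option 2: reference externally stored image files instead of copying the data
external_tps = TwoPhotonSeries(name='external_two_photon_series',
                               imaging_plane=imaging_plane,
                               format='external',
                               external_file=['images.tiff'],  # hypothetical file
                               starting_frame=[0],
                               rate=30.0,
                               unit='normalized amplitude')
nwbfile.add_acquisition(external_tps)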
Example #5
def nwb_copy_file(old_file, new_file, cp_objs=None):
    """
    Copy fields defined in 'cp_objs' from an existing NWB file to a new NWB file.

    Parameters
    ----------
    old_file : str, path
        String such as '/path/to/old_file.nwb'.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
    """

    if cp_objs is None:  # avoid the mutable-default-argument pitfall
        cp_objs = {}

    manager = get_manager()

    # Open original signal file
    with NWBHDF5IO(old_file, 'r', manager=manager,
                   load_namespaces=True) as io1:
        nwb_old = io1.read()

        # Creates new file
        nwb_new = NWBFile(session_description=str(nwb_old.session_description),
                          identifier='',
                          session_start_time=datetime.now(tzlocal()))
        with NWBHDF5IO(new_file, mode='w', manager=manager,
                       load_namespaces=False) as io2:
            # Institution name ------------------------------------------------
            if 'institution' in cp_objs:
                nwb_new.institution = str(nwb_old.institution)

            # Lab name --------------------------------------------------------
            if 'lab' in cp_objs:
                nwb_new.lab = str(nwb_old.lab)

            # Session id ------------------------------------------------------
            if 'session' in cp_objs:
                nwb_new.session_id = nwb_old.session_id

            # Devices ---------------------------------------------------------
            if 'devices' in cp_objs:
                for aux in list(nwb_old.devices.keys()):
                    dev = Device(nwb_old.devices[aux].name)
                    nwb_new.add_device(dev)

            # Electrode groups ------------------------------------------------
            if 'electrode_groups' in cp_objs:
                for aux in list(nwb_old.electrode_groups.keys()):
                    nwb_new.create_electrode_group(
                        name=str(nwb_old.electrode_groups[aux].name),
                        description=str(nwb_old.electrode_groups[
                            aux].description),
                        location=str(nwb_old.electrode_groups[aux].location),
                        device=nwb_new.get_device(
                            nwb_old.electrode_groups[aux].device.name)
                    )

            # Electrodes ------------------------------------------------------
            if 'electrodes' in cp_objs:
                nElec = len(nwb_old.electrodes['x'].data[:])
                for aux in np.arange(nElec):
                    nwb_new.add_electrode(
                        x=nwb_old.electrodes['x'][aux],
                        y=nwb_old.electrodes['y'][aux],
                        z=nwb_old.electrodes['z'][aux],
                        imp=nwb_old.electrodes['imp'][aux],
                        location=str(nwb_old.electrodes['location'][aux]),
                        filtering=str(nwb_old.electrodes['filtering'][aux]),
                        group=nwb_new.get_electrode_group(
                            nwb_old.electrodes['group'][aux].name),
                        group_name=str(nwb_old.electrodes['group_name'][aux])
                    )
                # if there are custom variables
                new_vars = list(nwb_old.electrodes.colnames)
                default_vars = ['x', 'y', 'z', 'imp', 'location', 'filtering',
                                'group', 'group_name']
                for var in default_vars:
                    if var in new_vars:
                        new_vars.remove(var)
                for var in new_vars:

                    if var == 'label':
                        var_data = [str(elem)
                                    for elem in nwb_old.electrodes[var].data[:]]
                    else:
                        var_data = np.array(nwb_old.electrodes[var].data[:])

                    nwb_new.add_electrode_column(
                        name=str(var),
                        description=str(nwb_old.electrodes[var].description),
                        data=var_data)

            # Epochs ----------------------------------------------------------
            if 'epochs' in cp_objs:
                nEpochs = len(nwb_old.epochs['start_time'].data[:])
                for i in np.arange(nEpochs):
                    nwb_new.add_epoch(
                        start_time=nwb_old.epochs['start_time'].data[i],
                        stop_time=nwb_old.epochs['stop_time'].data[i])
                # if there are custom variables
                new_vars = list(nwb_old.epochs.colnames)
                default_vars = ['start_time', 'stop_time', 'tags',
                                'timeseries']
                for var in default_vars:
                    if var in new_vars:
                        new_vars.remove(var)
                for var in new_vars:
                    nwb_new.add_epoch_column(name=var,
                                             description=nwb_old.epochs[
                                                 var].description,
                                             data=nwb_old.epochs[var].data[:])

            # Invalid times ---------------------------------------------------
            if 'invalid_times' in cp_objs:
                nInvalid = len(nwb_old.invalid_times['start_time'][:])
                for aux in np.arange(nInvalid):
                    nwb_new.add_invalid_time_interval(
                        start_time=nwb_old.invalid_times['start_time'][aux],
                        stop_time=nwb_old.invalid_times['stop_time'][aux])

            # Trials ----------------------------------------------------------
            if 'trials' in cp_objs:
                nTrials = len(nwb_old.trials['start_time'])
                for aux in np.arange(nTrials):
                    nwb_new.add_trial(
                        start_time=nwb_old.trials['start_time'][aux],
                        stop_time=nwb_old.trials['stop_time'][aux])
                # if there are custom variables
                new_vars = list(nwb_old.trials.colnames)
                default_vars = ['start_time', 'stop_time']
                for var in default_vars:
                    if var in new_vars:
                        new_vars.remove(var)
                for var in new_vars:
                    nwb_new.add_trial_column(name=var,
                                             description=nwb_old.trials[
                                                 var].description,
                                             data=nwb_old.trials[var].data[:])

            # Intervals -------------------------------------------------------
            if 'intervals' in cp_objs:
                all_objs_names = list(nwb_old.intervals.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.intervals[obj_name]
                    # create and add TimeIntervals
                    obj = TimeIntervals(name=obj_old.name,
                                        description=obj_old.description)
                    nInt = len(obj_old['start_time'])
                    for ind in np.arange(nInt):
                        obj.add_interval(start_time=obj_old['start_time'][ind],
                                         stop_time=obj_old['stop_time'][ind])
                    # Add to file
                    nwb_new.add_time_intervals(obj)

            # Stimulus --------------------------------------------------------
            if 'stimulus' in cp_objs:
                all_objs_names = list(nwb_old.stimulus.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.stimulus[obj_name]
                    obj = TimeSeries(name=obj_old.name,
                                     description=obj_old.description,
                                     data=obj_old.data[:],
                                     rate=obj_old.rate,
                                     resolution=obj_old.resolution,
                                     conversion=obj_old.conversion,
                                     starting_time=obj_old.starting_time,
                                     unit=obj_old.unit)
                    nwb_new.add_stimulus(obj)

            # Processing modules ----------------------------------------------
            if 'ecephys' in cp_objs:
                if cp_objs['ecephys'] is True:
                    interfaces = nwb_old.processing[
                        'ecephys'].data_interfaces.keys()
                else:  # list of items
                    interfaces = [
                        nwb_old.processing['ecephys'].data_interfaces[key]
                        for key in cp_objs['ecephys']
                    ]
                # Add ecephys module to NWB file
                ecephys_module = ProcessingModule(
                    name='ecephys',
                    description='Extracellular electrophysiology data.'
                )
                nwb_new.add_processing_module(ecephys_module)
                for interface_old in interfaces:
                    obj = copy_obj(interface_old, nwb_old, nwb_new)
                    if obj is not None:
                        ecephys_module.add_data_interface(obj)

            # Acquisition -----------------------------------------------------
            if 'acquisition' in cp_objs:
                if cp_objs['acquisition'] is True:
                    all_acq_names = list(nwb_old.acquisition.keys())
                else:  # list of items
                    all_acq_names = cp_objs['acquisition']
                for acq_name in all_acq_names:
                    obj_old = nwb_old.acquisition[acq_name]
                    obj = copy_obj(obj_old, nwb_old, nwb_new)
                    if obj is not None:
                        nwb_new.add_acquisition(obj)

            # Subject ---------------------------------------------------------
            if 'subject' in cp_objs:
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                except Exception:
                    nwb_new.subject = Subject(age=nwb_old.subject.age,
                                              description=nwb_old.subject.description,
                                              genotype=nwb_old.subject.genotype,
                                              sex=nwb_old.subject.sex,
                                              species=nwb_old.subject.species,
                                              subject_id=nwb_old.subject.subject_id,
                                              weight=nwb_old.subject.weight,
                                              date_of_birth=nwb_old.subject.date_of_birth)

            # Write new file with copied fields
            io2.write(nwb_new, link_data=False)
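
# Example usage (a hedged sketch, not from the original module; the paths are
# illustrative and the object names are taken from the docstring's example):
nwb_copy_file(
    old_file='/path/to/old_file.nwb',
    new_file='/path/to/new_file.nwb',
    cp_objs={'institution': True,
             'lab': True,
             'acquisition': ['microphone'],
             'ecephys': ['LFP', 'DecompositionSeries']})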
Example #6
    def run_conversion(self, nwbfile: NWBFile, metadata: dict):
        """
        Run conversion for this data interface.
        Reads optophysiology raw data from .rsd files and adds it to nwbfile.
        XXXXXXX_A.rsd - Raw data from donor
        XXXXXXX_B.rsd - Raw data from acceptor
        XXXXXXXXX.rsh - Header data

        Parameters
        ----------
        nwbfile : NWBFile
        metadata : dict
        """
        dir_cortical_imaging = self.source_data['dir_cortical_imaging']
        ophys_list = [i.name for i in Path(dir_cortical_imaging).glob('*.rsh')]
        if len(ophys_list) > 0:
            fname_prefix = ophys_list[0].split('-')[0]
        else:
            raise OSError(f"No .rsd file found in directory: {dir_cortical_imaging}.\n"
                          "Did you choose the correct path for source data?")

        def data_gen(channel, trial):
            """channel = 'A' or 'B'"""
            # Read trial-specific metadata file .rsh
            trial_meta = os.path.join(dir_cortical_imaging, f"{fname_prefix}-{trial}_{channel}.rsh")
            file_rsm, files_raw, acquisition_date, sample_rate, n_frames = self.read_trial_meta(trial_meta=trial_meta)

            # Iterates over all files within the same trial
            for fn, fraw in enumerate(files_raw):
                print('adding channel ' + channel + ', trial: ', trial, ': ', 100 * fn / len(files_raw), '%')
                fpath = os.path.join(dir_cortical_imaging, fraw)

                # Open the file and read its contents as a byte array (up to ~1 GB)
                with open(fpath, "rb") as f:
                    byte = f.read(1000000000)
                # Data as word array: 'h' signed, 'H' unsigned
                words = np.array(struct.unpack('h' * (len(byte) // 2), byte))

                # Iterates over frames within the same file (n_frames, 100, 100)
                n_frames = int(len(words) / 12800)
                words_reshaped = words.reshape(12800, n_frames, order='F')
                frames = np.zeros((n_frames, 100, 100))
                excess_frames = np.zeros((n_frames, 20, 100))
                for ifr in range(n_frames):
                    iframe = -words_reshaped[:, ifr].reshape(128, 100, order='F').astype('int16')
                    frames[ifr, :, :] = iframe[20:120, :]
                    excess_frames[ifr, :, :] = iframe[0:20, :]

                    yield iframe[20:120, :]

                #     # Analog signals are taken from excess data variable
                #     analog_1 = np.squeeze(np.squeeze(excess_frames[:, 12, 0:80:4]).reshape(20*256, 1))
                #     analog_2 = np.squeeze(np.squeeze(excess_frames[:, 14, 0:80:4]).reshape(20*256, 1))
                #     stim_trg = np.squeeze(np.squeeze(excess_frames[:, 8, 0:80:4]).reshape(20*256, 1))

        # Get session_start_time from first header file
        all_files = os.listdir(dir_cortical_imaging)
        all_headers = [f for f in all_files if ('.rsh' in f) and ('_A' not in f) and ('_B' not in f)]
        all_headers.sort()
        _, _, acquisition_date, _, _ = self.read_trial_meta(trial_meta=Path(dir_cortical_imaging) / all_headers[0])
        session_start_time = datetime.strptime(acquisition_date, '%Y/%m/%d %H:%M:%S')
        session_start_time_tzaware = pytz.timezone('EST').localize(session_start_time)
        if session_start_time_tzaware != nwbfile.session_start_time:
            print("Session start time in current nwbfile does not match the start time from rsd files.")
            print("Ophys data conversion aborted.")
            return

        # Create and add device
        device = Device(name=metadata['Ophys']['Device']['name'])
        nwbfile.add_device(device)

        # Get FRETSeries metadata
        meta_donor = metadata['Ophys']['FRET']['donor'][0]
        meta_acceptor = metadata['Ophys']['FRET']['acceptor'][0]

        # OpticalChannels
        opt_ch_donor = OpticalChannel(
            name=meta_donor['optical_channel'][0]['name'],
            description=meta_donor['optical_channel'][0]['description'],
            emission_lambda=float(meta_donor['optical_channel'][0]['emission_lambda'])
        )
        opt_ch_acceptor = OpticalChannel(
            name=meta_acceptor['optical_channel'][0]['name'],
            description=meta_acceptor['optical_channel'][0]['description'],
            emission_lambda=float(meta_acceptor['optical_channel'][0]['emission_lambda'])
        )

        # Add trials intervals values only if no trials data exists in nwbfile
        if nwbfile.trials is not None:
            add_trials = False
            print('Trials already exist in current nwb file. Ophys trials intervals not added.')
        else:
            add_trials = True

        # Iterate over trials, creates a FRET group per trial
        trials_numbers = [f.split('-')[1].replace('.rsh', '') for f in all_headers]
        for tr in trials_numbers:
            # Read trial-specific metadata file .rsh
            trial_meta_A = os.path.join(dir_cortical_imaging, f"{fname_prefix}-{tr}_A.rsh")
            trial_meta_B = os.path.join(dir_cortical_imaging, f"{fname_prefix}-{tr}_B.rsh")
            file_rsm_A, files_raw_A, acquisition_date_A, sample_rate_A, n_frames_A = self.read_trial_meta(trial_meta=trial_meta_A)
            file_rsm_B, files_raw_B, acquisition_date_B, sample_rate_B, n_frames_B = self.read_trial_meta(trial_meta=trial_meta_B)

            absolute_start_time = datetime.strptime(acquisition_date_A, '%Y/%m/%d %H:%M:%S')
            relative_start_time = float(
                (absolute_start_time - nwbfile.session_start_time.replace(tzinfo=None)).total_seconds())

            # Checks if Acceptor and Donor channels have the same basic parameters
            assert acquisition_date_A == acquisition_date_B, \
                "Acquisition date of channels do not match. Trial=" + str(tr)
            assert sample_rate_A == sample_rate_B, \
                "Sample rate of channels do not match. Trial=" + str(tr)
            assert n_frames_A == n_frames_B, \
                "Number of frames of channels do not match. Trial=" + str(tr)
            assert relative_start_time >= 0., \
                "Starting time is negative. Trial=" + str(tr)

            # Create iterator
            data_donor = DataChunkIterator(
                data=data_gen(channel='A', trial=tr),
                iter_axis=0,
                buffer_size=10000,
                maxshape=(None, 100, 100)
            )
            data_acceptor = DataChunkIterator(
                data=data_gen(channel='B', trial=tr),
                iter_axis=0,
                buffer_size=10000,
                maxshape=(None, 100, 100)
            )

            # FRETSeries
            frets_donor = FRETSeries(
                name='donor',
                fluorophore=meta_donor['fluorophore'],
                optical_channel=opt_ch_donor,
                device=device,
                description=meta_donor['description'],
                data=data_donor,
                starting_time=relative_start_time,
                rate=sample_rate_A,
                unit=meta_donor['unit'],
            )
            frets_acceptor = FRETSeries(
                name='acceptor',
                fluorophore=meta_acceptor['fluorophore'],
                optical_channel=opt_ch_acceptor,
                device=device,
                description=meta_acceptor['description'],
                data=data_acceptor,
                starting_time=relative_start_time,
                rate=sample_rate_B,
                unit=meta_acceptor['unit']
            )

            # Add FRET to acquisition
            meta_fret = metadata['Ophys']['FRET']
            fret = FRET(
                name=meta_fret['name'] + '_' + str(tr),
                excitation_lambda=float(meta_fret['excitation_lambda']),
                donor=frets_donor,
                acceptor=frets_acceptor
            )
            nwbfile.add_acquisition(fret)

            # Add trial
            if add_trials:
                tr_stop = relative_start_time + n_frames_A / sample_rate_A
                nwbfile.add_trial(
                    start_time=relative_start_time,
                    stop_time=tr_stop
                )
Example #7
# DEVICES & ELECTRODE GROUPS
"""
Add in probes as devices and electrode groups.
"""
probe_descriptions = pd.read_csv('probes.description.tsv', sep='\t')
probe_descriptions = list(probe_descriptions['description'])
electrode_groups = list()
for i in range(len(probe_descriptions)):
    probe_device = Device(name=str(i))
    probe_electrode_group = ElectrodeGroup(
        name='Probe' + str(i + 1),
        description='Neuropixels Phase3A opt3',
        device=probe_device,
        location='')
    nwb_file.add_device(probe_device)
    electrode_groups.append(probe_electrode_group)
    nwb_file.add_electrode_group(probe_electrode_group)

# CHANNELS
"""
Add channel information into the Electrode Table.
"""
# Read data
insertion_df = pd.read_csv('probes.insertion.tsv', sep='\t')
insertion_df['group_name'] = insertion_df.index.values

channel_site = read_npy_file('channels.site.npy')
channel_brain = pd.read_csv('channels.brainLocation.tsv', sep='\t')

channel_probes = read_npy_file('channels.probe.npy')
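
# A hedged sketch (not part of the original script) of how these channel arrays
# might be written into the Electrode Table; the real script presumably also
# draws coordinates and locations from channel_site and channel_brain.
import numpy as np

channel_probes_flat = np.ravel(channel_probes)
for ch in range(len(channel_probes_flat)):
    nwb_file.add_electrode(
        x=np.nan, y=np.nan, z=np.nan,  # placeholders for real coordinates
        imp=np.nan,
        location='unknown',
        filtering='none',
        group=electrode_groups[int(channel_probes_flat[ch])])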
Example #8
from pynwb import NWBFile, NWBHDF5IO
from pynwb.device import Device
from pynwb.ophys import OpticalChannel
from ndx_fret import FRET, FRETSeries

from datetime import datetime
import numpy as np

nwb = NWBFile('session_description', 'identifier', datetime.now().astimezone())

# Create and add device
device = Device(name='Device')
nwb.add_device(device)

# Create optical channels
opt_ch_d = OpticalChannel(name='optical_channel',
                          description='optical_channel_description',
                          emission_lambda=529.)
opt_ch_a = OpticalChannel(name='optical_channel',
                          description='optical_channel_description',
                          emission_lambda=633.)

# Create FRET
fs_d = FRETSeries(name='donor',
                  fluorophore='mCitrine',
                  optical_channel=opt_ch_d,
                  device=device,
                  description='description of donor series',
                  data=np.random.randn(100, 10, 10),
                  rate=200.,
                  unit='no unit')
Example #9
data_dir = '/Volumes/black_backup/data/Soltesz/example_miniscope'

settings_and_notes_file = os.path.join(data_dir, 'settings_and_notes.dat')

df = pd.read_csv(settings_and_notes_file, sep='\t').loc[0]

session_start_time = datetime(2017, 4, 15, 12, tzinfo=tzlocal())

nwb = NWBFile('session_description', 'identifier', session_start_time)

miniscope = Miniscope(name='Miniscope',
                      excitation=int(df['excitation']),
                      msCamExposure=int(df['msCamExposure']),
                      recordLength=int(df['recordLength']))

nwb.add_device(miniscope)

ms_files = [
    os.path.split(x)[1]
    for x in natsorted(glob(os.path.join(data_dir, 'msCam*.avi')))
]

behav_files = [
    os.path.split(x)[1]
    for x in natsorted(glob(os.path.join(data_dir, 'behavCam*.avi')))
]

nwb.add_acquisition(
    ImageSeries(name='OnePhotonSeries',
                format='external',
                external_file=ms_files,
Example #10
              session_start_time=session_start_time,
              experimenter='Joseph E. ODoherty',
              lab='Sabes lab',
              institution='University of California, San Francisco',
              experiment_description=experiment_description,
              session_id='indy_20160407_02')


# Create Device and ElectrodeGroup and add electrode information to the NWB file.


# M1

# Create device
device_M1 = Device('Recording_Device_M1')
nwb.add_device(device_M1)

# Create electrode group
electrode_group_M1 = nwb.create_electrode_group(name='ElectrodeArrayM1',
                                                description='96 Channels Electrode Array',
                                                location='Motor Cortex',
                                                device=device_M1)

# Add metadata about each electrode in the group
for idx in np.arange(96):
    nwb.add_electrode(x=np.nan, y=np.nan, z=np.nan,
                      imp=np.nan,
                      location='M1', filtering='none',
                      group=electrode_group_M1)


# S1
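# The S1 block is truncated in this excerpt; it presumably mirrors the M1 block
# above. A sketch under that assumption (group name and location are guesses):
device_S1 = Device('Recording_Device_S1')
nwb.add_device(device_S1)

electrode_group_S1 = nwb.create_electrode_group(name='ElectrodeArrayS1',
                                                description='96 Channels Electrode Array',
                                                location='Somatosensory Cortex',
                                                device=device_S1)

for idx in np.arange(96):
    nwb.add_electrode(x=np.nan, y=np.nan, z=np.nan,
                      imp=np.nan,
                      location='S1', filtering='none',
                      group=electrode_group_S1)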
Example #11
class TestNWBFileReading(TestCase):
    def setUp(self):
        start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal())
        create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
        self.nwb_file_content = NWBFile(
            session_description='demonstrate NWBFile basics',
            identifier='nwb_file',
            session_start_time=start_time,
            file_create_date=create_date)

    def test_read_nwb_header_device_successfully(self):
        header_device = HeaderDevice(
            name='header_device',
            headstage_serial='Sample headstage_serial',
            headstage_smart_ref_on='Sample headstage_smart_ref_on',
            realtime_mode='Sample realtime_mode',
            headstage_auto_settle_on='Sample headstage_auto_settle_on',
            timestamp_at_creation='Sample timestamp_at_creation',
            controller_firmware_version='Sample controller_firmware_version',
            controller_serial='Sample controller_serial',
            save_displayed_chan_only='Sample save_displayed_chan_only',
            headstage_firmware_version='Sample headstage_firmware_version',
            qt_version='Sample qt_version',
            compile_date='Sample compile_date',
            compile_time='Sample compile_time',
            file_prefix='Sample file_prefix',
            headstage_gyro_sensor_on='Sample headstage_gyro_sensor_on',
            headstage_mag_sensor_on='Sample headstage_mag_sensor_on',
            trodes_version='Sample trodes_version',
            headstage_accel_sensor_on='Sample headstage_accel_sensor_on',
            commit_head='Sample commit_head',
            system_time_at_creation='Sample system_time_at_creation',
            file_path='Sample file_path',
        )

        self.nwb_file_content.add_device(header_device)
        nwb_file_handler = NWBHDF5IO('header_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('header_device.nwb'))
        with pynwb.NWBHDF5IO('header_device.nwb', 'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertEqual(nwb_file.devices['header_device'].commit_head,
                             header_device.commit_head)

        self.delete_nwb('header_device')

    def test_read_nwb_probe_successfully(self):
        shanks_electrode = ShanksElectrode(name='electrode_shank',
                                           rel_x=1.0,
                                           rel_y=2.0,
                                           rel_z=3.0)
        shank = Shank(name='shank')
        shank.add_shanks_electrode(shanks_electrode)
        probe = Probe(name='probe',
                      units='mm',
                      id=1,
                      probe_type='type_1',
                      probe_description='2',
                      contact_size=1.0,
                      contact_side_numbering=False)
        probe.add_shank(shank)
        self.nwb_file_content.add_device(probe)

        nwb_file_handler = NWBHDF5IO('probe.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('probe.nwb'))
        with pynwb.NWBHDF5IO('probe.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['probe'], probe)

        self.delete_nwb('probe')

    def test_read_nwb_data_acq_device_successfully(self):
        data_acq_device = DataAcqDevice(name='DataAcqDevice1',
                                        system='System1',
                                        amplifier='Amplifier1',
                                        adc_circuit='adc_circuit1')
        self.nwb_file_content.add_device(data_acq_device)

        nwb_file_handler = NWBHDF5IO('data_acq_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('data_acq_device.nwb'))
        with pynwb.NWBHDF5IO('data_acq_device.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['DataAcqDevice1'],
                                      data_acq_device)

        self.delete_nwb('data_acq_device')

    def test_read_nwb_camera_device_successfully(self):
        camera_device = CameraDevice(name='CameraDevice1',
                                     meters_per_pixel=0.20,
                                     camera_name='test name',
                                     model='ndx2000',
                                     lens='500dpt',
                                     manufacturer='sony')
        self.nwb_file_content.add_device(camera_device)

        nwb_file_handler = NWBHDF5IO('camera_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('camera_device.nwb'))
        with pynwb.NWBHDF5IO('camera_device.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['CameraDevice1'],
                                      camera_device)

        self.delete_nwb('camera_device')

    def test_read_nwb_nwb_image_series_successfully(self):
        device_1 = Device('device1')
        device_2 = Device('device2')
        mock_timestamps = [1, 2, 3]
        mock_external_file = ['some file']

        nwb_image_series = NwbImageSeries(name='NwbImageSeries1',
                                          timestamps=mock_timestamps,
                                          external_file=mock_external_file,
                                          devices=[device_1, device_2])

        behavioral_time_series = BehavioralEvents(name="BehavioralTimeSeries")
        behavioral_time_series.add_timeseries(nwb_image_series)
        processing_module = ProcessingModule(name='ProcessingModule',
                                             description='')
        processing_module.add_data_interface(behavioral_time_series)
        self.nwb_file_content.add_processing_module(processing_module)

        self.nwb_file_content.add_stimulus_template(nwb_image_series)

        nwb_file_handler = NWBHDF5IO('nwb_image_series.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('nwb_image_series.nwb'))
        with pynwb.NWBHDF5IO('nwb_image_series.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(
                nwb_file.stimulus_template['NwbImageSeries1'],
                nwb_image_series)
            self.assertContainerEqual(
                nwb_file.processing['ProcessingModule'].data_interfaces[
                    'BehavioralTimeSeries'].time_series['NwbImageSeries1'],
                nwb_image_series)

        self.delete_nwb('nwb_image_series')

    def test_read_nwb_nwb_electrode_group_successfully(self):
        device = Device('device_0')
        self.nwb_file_content.add_device(device)
        nwb_electrode_group = NwbElectrodeGroup(
            name='nwb_electrode_group_0',
            description='Sample description',
            location='Sample location',
            device=device,
            targeted_location='predicted location',
            targeted_x=1.0,
            targeted_y=2.0,
            targeted_z=3.0,
            units='um')

        self.nwb_file_content.add_electrode_group(nwb_electrode_group)
        nwb_file_handler = NWBHDF5IO('nwb_electrode_group.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('nwb_electrode_group.nwb'))
        with pynwb.NWBHDF5IO('nwb_electrode_group.nwb',
                             'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertEqual(
                nwb_file.electrode_groups['nwb_electrode_group_0'].name,
                nwb_electrode_group.name)
            self.assertEqual(
                nwb_file.electrode_groups['nwb_electrode_group_0'].
                targeted_location, nwb_electrode_group.targeted_location)

        self.delete_nwb('nwb_electrode_group')

    def test_read_nwb_associated_files_successfully(self):
        associated_files = AssociatedFiles(
            name='file1',
            description='description of file1',
            content='1 2 3 content of file test',
            task_epochs='1, 2')
        self.nwb_file_content.add_processing_module(
            ProcessingModule('associated_files',
                             'description_of_associated_files'))
        self.nwb_file_content.processing['associated_files'].add(
            associated_files)

        nwb_file_handler = NWBHDF5IO('associated_files.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('associated_files.nwb'))
        with pynwb.NWBHDF5IO('associated_files.nwb', 'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()

            self.assertIsInstance(
                nwb_file.processing['associated_files']['file1'],
                AssociatedFiles)
            self.assertEqual(
                'file1', nwb_file.processing['associated_files']['file1'].name)
            self.assertEqual(
                'description of file1', nwb_file.processing['associated_files']
                ['file1'].fields['description'])
            self.assertEqual(
                '1 2 3 content of file test',
                nwb_file.processing['associated_files']
                ['file1'].fields['content'])
            self.assertEqual(
                '1, 2', nwb_file.processing['associated_files']
                ['file1'].fields['task_epochs'])

        self.delete_nwb('associated_files')

    @staticmethod
    def delete_nwb(filename):
        os.remove(filename + '.nwb')
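To run these round-trip tests standalone, the usual unittest entry point (assumed, not part of the original snippet; assumes unittest is imported) is:

if __name__ == '__main__':
    unittest.main()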
Example #12
    def setUp(self):

        nwbfile = NWBFile(
            'my first synthetic recording',
            'EXAMPLE_ID',
            datetime.now(tzlocal()),
            experimenter='Dr. Bilbo Baggins',
            lab='Bag End Laboratory',
            institution='University of Middle Earth at the Shire',
            experiment_description=('I went on an adventure with thirteen '
                                    'dwarves to reclaim vast treasures.'),
            session_id='LONELYMTN')

        device = Device('imaging_device_1')
        nwbfile.add_device(device)
        optical_channel = OpticalChannel('my_optchan', 'description', 500.)
        imaging_plane = nwbfile.create_imaging_plane(
            'my_imgpln', optical_channel,
            'a very interesting part of the brain',
            device, 600., 300., 'GFP', 'my favorite brain location',
            np.ones((5, 5, 3)), 4.0, 'manifold unit', 'A frame to refer to')

        self.image_series = TwoPhotonSeries(name='test_iS',
                                            dimension=[2],
                                            data=np.random.rand(10, 5, 5, 3),
                                            external_file=['images.tiff'],
                                            imaging_plane=imaging_plane,
                                            starting_frame=[0],
                                            format='tiff',
                                            starting_time=0.0,
                                            rate=1.0)
        nwbfile.add_acquisition(self.image_series)

        mod = nwbfile.create_processing_module(
            'ophys', 'contains optical physiology processed data')
        img_seg = ImageSegmentation()
        mod.add(img_seg)
        ps = img_seg.create_plane_segmentation(
            'output from segmenting my favorite imaging plane', imaging_plane,
            'my_planeseg', self.image_series)

        w, h = 3, 3
        pix_mask1 = [(0, 0, 1.1), (1, 1, 1.2), (2, 2, 1.3)]
        vox_mask1 = [(0, 0, 0, 1.1), (1, 1, 1, 1.2), (2, 2, 2, 1.3)]
        img_mask1 = [[0.0 for x in range(w)] for y in range(h)]
        img_mask1[0][0] = 1.1
        img_mask1[1][1] = 1.2
        img_mask1[2][2] = 1.3
        ps.add_roi(pixel_mask=pix_mask1,
                   image_mask=img_mask1,
                   voxel_mask=vox_mask1)

        pix_mask2 = [(0, 0, 2.1), (1, 1, 2.2)]
        vox_mask2 = [(0, 0, 0, 2.1), (1, 1, 1, 2.2)]
        img_mask2 = [[0.0 for x in range(w)] for y in range(h)]
        img_mask2[0][0] = 2.1
        img_mask2[1][1] = 2.2
        ps.add_roi(pixel_mask=pix_mask2,
                   image_mask=img_mask2,
                   voxel_mask=vox_mask2)

        fl = Fluorescence()
        mod.add(fl)

        rt_region = ps.create_roi_table_region('the first of two ROIs',
                                               region=[0])

        data = np.array([0., 1., 2., 3., 4., 5., 6., 7., 8.,
                         9.]).reshape(10, 1)
        timestamps = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
        rrs = fl.create_roi_response_series('my_rrs',
                                            data,
                                            rt_region,
                                            unit='lumens',
                                            timestamps=timestamps)

        self.df_over_f = DfOverF(rrs)
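The fixture stops after building the DfOverF container without attaching it. A test exercising it would typically add it to the same ophys module first; a one-line sketch continuing setUp:

        mod.add(self.df_over_f)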
Example #13
def test_device():
    nwbfile = NWBFile('session description',
                      'session id',
                      datetime.now(tzlocal()),
                      experimenter='experimenter name',
                      lab='lab name',
                      institution='institution name',
                      experiment_description=('experiment description'),
                      session_id='sessionid')
    laserline_device = LaserLine(name='mylaserline1',
                                 reference='test_ref_laserline',
                                 analog_modulation_frequency=Measurement(
                                     name='None',
                                     description='None',
                                     unit='Hz',
                                     data=[100]),
                                 power=Measurement(name='None',
                                                   description='None',
                                                   unit='uW',
                                                   data=[100]))

    laserline_devices_ = LaserLineDevices()
    laserline_devices_.add_laserline(laserline_device)
    laserline_devices_.create_laserline(
        name='mylaserline2',
        reference='test_ref_laserline',
        analog_modulation_frequency=Measurement(name='None',
                                                description='None',
                                                unit='Hz',
                                                data=[100]),
        power=Measurement(name='None',
                          description='None',
                          unit='uW',
                          data=[100]))

    photodetector_device = PhotoDetector(name='myphotodetector1',
                                         reference='test_ref_photodetector',
                                         gain=Measurement(name='None',
                                                          description='None',
                                                          unit='Hz',
                                                          data=[1]),
                                         bandwidth=Measurement(
                                             name='None',
                                             description='None',
                                             unit='uW',
                                             data=[50]))

    photodetector_devices_ = PhotoDetectorDevices()
    photodetector_devices_.add_photodetector(photodetector_device)

    lockinamp_device = LockInAmplifier(name='mylockinamp',
                                       demodulation_filter_order=10.0,
                                       reference='test_ref',
                                       demod_bandwidth=Measurement(
                                           name='None',
                                           description='None',
                                           unit='Hz',
                                           data=[150]),
                                       columns=[
                                           VectorData(name='channel_name',
                                                      description='None',
                                                      data=['name1', 'name2']),
                                           Measurement(name='offset',
                                                       description='None',
                                                       unit='mV',
                                                       data=[140, 260]),
                                           VectorData(name='gain',
                                                      description='None',
                                                      data=[250, 250])
                                       ])
    lockinamp_devices_ = LockInAmplifierDevices()
    lockinamp_devices_.add_lockinamp(lockinamp_device)

    nwbfile.add_device(
        TEMPO(name='tempo_test',
              laserline_devices=laserline_devices_,
              photodetector_devices=photodetector_devices_,
              lockinamp_devices=lockinamp_devices_))

    # testing laserline_device
    assert_array_equal(
        np.array(nwbfile.devices['tempo_test'].laserline_devices.children[0].
                 analog_modulation_frequency.data), np.array([100]))
    # testing photodetector_device
    assert_array_equal(
        np.array(nwbfile.devices['tempo_test'].photodetector_devices.
                 children[0].bandwidth.data), np.array([50]))
    # testing lockinamp_device
    assert_array_equal(
        np.array(nwbfile.devices['tempo_test'].lockinamp_devices.children[0].
                 columns[1].data), np.array([140, 260]))

    with NWBHDF5IO('test_ndx-tempo.nwb', 'w') as io:
        io.write(nwbfile)
        del nwbfile
    with NWBHDF5IO('test_ndx-tempo.nwb', 'r', load_namespaces=True) as io:
        nwb = io.read()
        # testing laserline_device
        assert_array_equal(
            np.array(nwb.devices['tempo_test'].laserline_devices.children[0].
                     analog_modulation_frequency.data), np.array([100]))
        # testing photodetector_device
        assert_array_equal(
            np.array(nwb.devices['tempo_test'].photodetector_devices.
                     children[0].bandwidth.data), np.array([50]))
        # testing lockinamp_device
        assert_array_equal(
            np.array(nwb.devices['tempo_test'].lockinamp_devices.children[0].
                     columns[1].data), np.array([140, 260]))
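Unlike the earlier examples, this test leaves test_ndx-tempo.nwb on disk. Mirroring their cleanup step (assumes os is imported):

    os.remove('test_ndx-tempo.nwb')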
Example #14
class Alyx2NWBConverter:
    def __init__(self,
                 saveloc=None,
                 nwb_metadata_file=None,
                 metadata_obj: Alyx2NWBMetadata = None,
                 one_object: ONE = None,
                 save_raw=False,
                 save_camera_raw=False,
                 complevel=4,
                 shuffle=False,
                 buffer_size=1):
        """
        Retrieves all Alyx session and subject metadata, plus the raw data for an eid, using the
        ONE API's load methods, maps them to NWB-supported datatypes and creates an NWB file.
        Parameters
        ----------
        saveloc: str, Path
            save location of the nwb file
        nwb_metadata_file: [dict, str]
            output of Alyx2NWBMetadata as a dict, or the location of its json dump as a str
        metadata_obj: Alyx2NWBMetadata
        one_object: ONE()
        save_raw: bool
            load and save large raw files (ecephys.raw.ap/lf.cbin) to nwb
        save_camera_raw: bool
            load and save the mouse camera movie (.mp4): _iblrig_Camera.raw
        complevel: int
            compression level to apply to raw datasets, 0 (lowest) to 9 (highest).
            https://docs.h5py.org/en/latest/high/dataset.html
        shuffle: bool
            enable the shuffle I/O filter. http://docs.h5py.org/en/latest/high/dataset.html#dataset-shuffle
        buffer_size: int
            number of data chunks buffered by the DataChunkIterator when writing raw data
        """
        self.buffer_size = buffer_size
        self.complevel = complevel
        self.shuffle = shuffle
        if nwb_metadata_file is not None:
            if isinstance(nwb_metadata_file, dict):
                self.nwb_metadata = nwb_metadata_file
            elif isinstance(nwb_metadata_file, str):
                with open(nwb_metadata_file, 'r') as f:
                    self.nwb_metadata = json.load(f)
        elif metadata_obj is not None:
            self.nwb_metadata = metadata_obj.complete_metadata
        else:
            raise Exception(
                'one of the arguments nwb_metadata_file or metadata_obj is required')
        if one_object is not None:
            self.one_object = one_object
        elif metadata_obj is not None:
            self.one_object = metadata_obj.one_obj
        else:
            warnings.warn('no ONE object provided, creating one and continuing')
            self.one_object = ONE()
        if saveloc is None:
            warnings.warn('no save location provided, saving nwb file in the current working directory')
            self.saveloc = str(Path.cwd())
        else:
            self.saveloc = str(saveloc)
        self.eid = self.nwb_metadata["eid"]
        if not isinstance(self.nwb_metadata['NWBFile']['session_start_time'],
                          datetime):
            self.nwb_metadata['NWBFile']['session_start_time'] = \
                datetime.strptime(self.nwb_metadata['NWBFile']['session_start_time'], '%Y-%m-%dT%X').replace(
                    tzinfo=pytz.utc)
            self.nwb_metadata['IBLSubject']['date_of_birth'] = \
                datetime.strptime(self.nwb_metadata['IBLSubject']['date_of_birth'], '%Y-%m-%dT%X').replace(
                    tzinfo=pytz.utc)
        # create nwbfile:
        self.initialize_nwbfile()
        self.no_probes = len(self.nwb_metadata['Probes'])
        if self.no_probes == 0:
            warnings.warn(
                'could not find probe information, will create trials, behavior, acquisition'
            )
        self.electrode_table_exist = False
        self._one_data = _OneData(self.one_object,
                                  self.eid,
                                  self.no_probes,
                                  self.nwb_metadata,
                                  save_raw=save_raw,
                                  save_camera_raw=save_camera_raw)

    def initialize_nwbfile(self):
        """
        Creates self.nwbfile, devices and electrode group of nwb file.
        """
        nwbfile_args = dict(identifier=str(uuid.uuid4()), )
        nwbfile_args.update(**self.nwb_metadata['NWBFile'])
        self.nwbfile = NWBFile(**nwbfile_args)
        # create devices
        for idevice_meta in self.nwb_metadata['Ecephys']['Device']:
            self.nwbfile.create_device(**idevice_meta)
        if 'ElectrodeGroup' in self.nwb_metadata['Ecephys']:
            self.create_electrode_groups(self.nwb_metadata['Ecephys'])

    def create_electrode_groups(self, metadata_ecephys):
        """
        This method is called at __init__.
        Use metadata to create ElectrodeGroup object(s) in the NWBFile

        Parameters
        ----------
        metadata_ecephys : dict
            Dict with key:value pairs for defining the Ecephys group from where this
            ElectrodeGroup belongs. This should contain keys for required groups
            such as 'Device', 'ElectrodeGroup', etc.
        """
        for metadata_elec_group in metadata_ecephys['ElectrodeGroup']:
            eg_name = metadata_elec_group['name']
            # Tests if ElectrodeGroup already exists
            aux = [i.name == eg_name for i in self.nwbfile.children]
            if any(aux):
                print(eg_name + ' already exists in current NWBFile.')
            else:
                device_name = metadata_elec_group['device']
                if device_name in self.nwbfile.devices:
                    device = self.nwbfile.devices[device_name]
                else:
                    print('Device ', device_name, ' for ElectrodeGroup ',
                          eg_name, ' does not exist.')
                    print('Make sure ', device_name,
                          ' is defined in metadata.')
                    continue  # skip this group: its device is missing

                eg_description = metadata_elec_group['description']
                eg_location = metadata_elec_group['location']
                self.nwbfile.create_electrode_group(name=eg_name,
                                                    location=eg_location,
                                                    device=device,
                                                    description=eg_description)

    def check_module(self, name, description=None):
        """
        Check if processing module exists. If not, create it. Then return module

        Parameters
        ----------
        name: str
        description: str | None (optional)

        Returns
        -------
        pynwb.module

        """

        if name in self.nwbfile.processing:
            return self.nwbfile.processing[name]
        else:
            if description is None:
                description = name
            return self.nwbfile.create_processing_module(name, description)

    def create_stimulus(self):
        """
        Creates stimulus data in nwbfile
        """
        stimulus_list = self._get_data(
            self.nwb_metadata['Stimulus'].get('time_series'))
        for i in stimulus_list:
            self.nwbfile.add_stimulus(pynwb.TimeSeries(**i))

    def create_units(self):
        """
        Creates the units table in the nwbfile.
        """
        if self.no_probes == 0:
            return
        if not self.electrode_table_exist:
            self.create_electrode_table_ecephys()
        unit_table_list = self._get_data(self.nwb_metadata['Units'])
        # no required arguments for units table. Below are default columns in the table.
        default_args = [
            'id', 'waveform_mean', 'electrodes', 'electrode_group',
            'spike_times', 'obs_intervals'
        ]
        default_ids = _get_default_column_ids(
            default_args, [i['name'] for i in unit_table_list])
        if len(default_ids) != len(default_args):
            warnings.warn(f'could not find all of the default columns {default_args} in the units table')
        non_default_ids = list(
            set(range(len(unit_table_list))).difference(set(default_ids)))
        default_dict = {
            unit_table_list[id]['name']: unit_table_list[id]['data']
            for id in default_ids
        }
        for cluster_no in range(len(unit_table_list[0]['data'])):
            add_dict = dict()
            for ibl_dataset_name in default_dict:
                if ibl_dataset_name == 'electrodes':
                    add_dict.update({
                        ibl_dataset_name:
                        [default_dict[ibl_dataset_name][cluster_no]]
                    })
                elif ibl_dataset_name == 'spike_times':
                    add_dict.update({
                        ibl_dataset_name:
                        default_dict[ibl_dataset_name][cluster_no]
                    })
                elif ibl_dataset_name == 'obs_intervals':  # common across all clusters
                    add_dict.update(
                        {ibl_dataset_name: default_dict[ibl_dataset_name]})
                elif ibl_dataset_name == 'electrode_group':
                    add_dict.update({
                        ibl_dataset_name:
                        self.nwbfile.electrode_groups[self.nwb_metadata[
                            'Probes'][default_dict[ibl_dataset_name]
                                      [cluster_no]]['name']]
                    })
                elif ibl_dataset_name == 'id':
                    if cluster_no >= self._one_data.data_attrs_dump[
                            'unit_table_length'][0]:
                        add_dict.update({
                            ibl_dataset_name:
                            default_dict[ibl_dataset_name][cluster_no] +
                            self._one_data.data_attrs_dump['unit_table_length']
                            [0]
                        })
                    else:
                        add_dict.update({
                            ibl_dataset_name:
                            default_dict[ibl_dataset_name][cluster_no]
                        })
                elif ibl_dataset_name == 'waveform_mean':
                    add_dict.update({
                        ibl_dataset_name:
                        np.mean(default_dict[ibl_dataset_name][cluster_no],
                                axis=1)
                    })  # mean across all the channels of the cluster
            self.nwbfile.add_unit(**add_dict)

        for id in non_default_ids:
            if isinstance(unit_table_list[id]['data'], np.ndarray):
                unit_table_list[id]['data'] = unit_table_list[id][
                    'data'].tolist()  # convert numpy array to a plain list
            self.nwbfile.add_unit_column(
                name=unit_table_list[id]['name'],
                description=unit_table_list[id]['description'],
                data=unit_table_list[id]['data'])

    def create_electrode_table_ecephys(self):
        """
        Creates electrode table
        """
        if self.no_probes == 0:
            return
        if self.electrode_table_exist:
            return
        electrode_table_list = self._get_data(
            self.nwb_metadata['ElectrodeTable'])
        # electrode table has required arguments:
        required_args = ['group', 'x', 'y']
        default_ids = _get_default_column_ids(
            required_args, [i['name'] for i in electrode_table_list])
        non_default_ids = list(
            set(range(len(electrode_table_list))).difference(set(default_ids)))
        default_dict = {
            electrode_table_list[id]['name']: electrode_table_list[id]['data']
            for id in default_ids
        }
        if 'group' in default_dict:
            group_labels = default_dict['group']
        else:  # otherwise derive group labels from the per-probe electrode counts
            group_labels = np.concatenate([
                np.ones(self._one_data.
                        data_attrs_dump['electrode_table_length'][i],
                        dtype=int) * i for i in range(self.no_probes)
            ])
        for electrode_no in range(len(electrode_table_list[0]['data'])):
            if 'x' in default_dict:
                x = default_dict['x'][electrode_no][0]
                y = default_dict['y'][electrode_no][1]
            else:
                x = float('NaN')
                y = float('NaN')
            group_data = self.nwbfile.electrode_groups[self.nwb_metadata[
                'Probes'][group_labels[electrode_no]]['name']]
            self.nwbfile.add_electrode(x=x,
                                       y=y,
                                       z=float('NaN'),
                                       imp=float('NaN'),
                                       location='None',
                                       group=group_data,
                                       filtering='none')
        for id in non_default_ids:
            self.nwbfile.add_electrode_column(
                name=electrode_table_list[id]['name'],
                description=electrode_table_list[id]['description'],
                data=electrode_table_list[id]['data'])
        # create probes specific DynamicTableRegion:
        self.probe_dt_region = [
            self.nwbfile.create_electrode_table_region(region=list(
                range(self._one_data.data_attrs_dump['electrode_table_length']
                      [j])),
                                                       description=i['name'])
            for j, i in enumerate(self.nwb_metadata['Probes'])
        ]
        self.probe_dt_region_all = self.nwbfile.create_electrode_table_region(
            region=list(
                range(
                    sum(self._one_data.
                        data_attrs_dump['electrode_table_length']))),
            description='AllProbes')
        self.electrode_table_exist = True

    def create_timeseries_ecephys(self):
        """
        Creates SpikeEventSeries, ElectricalSeries and Spectrum datatypes within nwbfile > processing > ecephys.
        """
        if self.no_probes == 0:
            return
        if not self.electrode_table_exist:
            self.create_electrode_table_ecephys()
        if 'ecephys' not in self.nwbfile.processing:
            mod = self.nwbfile.create_processing_module(
                'ecephys', 'Processed electrophysiology data of IBL')
        else:
            mod = self.nwbfile.get_processing_module('ecephys')
        for neurodata_type_name, neurodata_type_args_list in self.nwb_metadata[
                'Ecephys']['Ecephys'].items():
            data_retrieved_args_list = self._get_data(
                neurodata_type_args_list
            )  # list of dicts with keys as argument names
            for no, neurodata_type_args in enumerate(data_retrieved_args_list):
                ibl_dataset_name = neurodata_type_args_list[no]['data']
                if 'ElectricalSeries' in neurodata_type_name:
                    timestamps_names = self._one_data.data_attrs_dump[
                        '_iblqc_ephysTimeRms.timestamps']
                    data_names = self._one_data.data_attrs_dump[
                        '_iblqc_ephysTimeRms.rms']
                    for data_idx, data in enumerate(
                            neurodata_type_args['data']):
                        probe_no = [
                            j for j in range(self.no_probes)
                            if self.nwb_metadata['Probes'][j]['name'] in
                            data_names[data_idx]
                        ][0]
                        if data.shape[1] > self._one_data.data_attrs_dump[
                                'electrode_table_length'][probe_no]:
                            if 'channels.rawInd' in self._one_data.loaded_datasets:
                                channel_idx = self._one_data.loaded_datasets[
                                    'channels.rawInd'][probe_no].data.astype(
                                        'int')
                            else:
                                warnings.warn('could not find channels.rawInd')
                                break
                        else:
                            channel_idx = slice(None)
                        mod.add(
                            ElectricalSeries(
                                name=data_names[data_idx],
                                description=neurodata_type_args['description'],
                                timestamps=neurodata_type_args['timestamps']
                                [timestamps_names.index(data_names[data_idx])],
                                data=data[:, channel_idx],
                                electrodes=self.probe_dt_region[probe_no]))
                elif 'Spectrum' in neurodata_type_name:
                    if ibl_dataset_name == '_iblqc_ephysSpectralDensity.power':
                        freqs_names = self._one_data.data_attrs_dump[
                            '_iblqc_ephysSpectralDensity.freqs']
                        data_names = self._one_data.data_attrs_dump[
                            '_iblqc_ephysSpectralDensity.power']
                        for data_idx, data in enumerate(
                                neurodata_type_args['data']):
                            mod.add(
                                Spectrum(name=data_names[data_idx],
                                         frequencies=neurodata_type_args[
                                             'frequencies'][freqs_names.index(
                                                 data_names[data_idx])],
                                         power=data))
                elif 'SpikeEventSeries' in neurodata_type_name:
                    neurodata_type_args.update(
                        dict(electrodes=self.probe_dt_region_all))
                    mod.add(
                        pynwb.ecephys.SpikeEventSeries(**neurodata_type_args))

    def create_behavior(self):
        """
        Create behavior processing module
        """
        self.check_module('behavior')
        for behavior_datatype in self.nwb_metadata['Behavior']:
            if behavior_datatype == 'Position':
                position_cont = pynwb.behavior.Position()
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['spatial_series'])
                if len(time_series_list_details) == 0:
                    continue
                # rate_list = [150.0,60.0,60.0] # based on the google doc for _iblrig_body/left/rightCamera.raw,
                dataname_list = self._one_data.data_attrs_dump['camera.dlc']
                data_list = time_series_list_details[0]['data']
                timestamps_list = time_series_list_details[0]['timestamps']
                for dataname, data, timestamps in zip(dataname_list, data_list,
                                                      timestamps_list):
                    colnames = data.columns
                    data_np = data.to_numpy()
                    x_column_ids = [
                        n for n, k in enumerate(colnames) if 'x' in k
                    ]
                    for x_column_id in x_column_ids:
                        data_loop = data_np[:, x_column_id:x_column_id + 2]
                        position_cont.create_spatial_series(
                            name=dataname + colnames[x_column_id][:-2],
                            data=data_loop,
                            reference_frame='none',
                            timestamps=timestamps,
                            conversion=1e-3)
                self.nwbfile.processing['behavior'].add(position_cont)
            elif not (behavior_datatype == 'BehavioralEpochs'):
                time_series_func = pynwb.TimeSeries
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['time_series'])
                if len(time_series_list_details) == 0:
                    continue
                time_series_list_obj = []
                for i in time_series_list_details:
                    unit = 'radians/sec' if 'velocity' in i[
                        'name'] else 'radians'
                    time_series_list_obj.append(
                        time_series_func(**i, unit=unit))
                func = getattr(pynwb.behavior, behavior_datatype)
                self.nwbfile.processing['behavior'].add(
                    func(time_series=time_series_list_obj))
            else:
                time_series_func = pynwb.epoch.TimeIntervals
                time_series_list_details = self._get_data(
                    self.nwb_metadata['Behavior'][behavior_datatype]
                    ['time_intervals'])
                if len(time_series_list_details) == 0:
                    continue
                for k in time_series_list_details:
                    time_intervals = time_series_func('BehavioralEpochs')
                    for time_interval in k['timestamps']:
                        time_intervals.add_interval(
                            start_time=time_interval[0],
                            stop_time=time_interval[1])
                    time_intervals.add_column(k['name'],
                                              k['description'],
                                              data=k['data'])
                    self.nwbfile.processing['behavior'].add(time_intervals)

    def create_acquisition(self):
        """
        Acquisition data like audiospectrogram(raw beh data), nidq(raw ephys data), raw camera data.
        These are independent of probe type.
        """
        for neurodata_type_name, neurodata_type_args_list in self.nwb_metadata[
                'Acquisition'].items():
            data_retrieved_args_list = self._get_data(neurodata_type_args_list)
            for neurodata_type_args in data_retrieved_args_list:
                if neurodata_type_name == 'ImageSeries':
                    for types, times in zip(neurodata_type_args['data'],
                                            neurodata_type_args['timestamps']):
                        customargs = dict(name='camera_raw',
                                          external_file=[str(types)],
                                          format='external',
                                          timestamps=times,
                                          unit='n.a.')
                        self.nwbfile.add_acquisition(ImageSeries(**customargs))
                elif neurodata_type_name == 'DecompositionSeries':
                    neurodata_type_args['bands'] = np.squeeze(
                        neurodata_type_args['bands'])
                    freqs = DynamicTable(
                        'bands',
                        'spectrogram frequencies',
                        id=np.arange(neurodata_type_args['bands'].shape[0]))
                    freqs.add_column('freq',
                                     'frequency value',
                                     data=neurodata_type_args['bands'])
                    neurodata_type_args.update(dict(bands=freqs))
                    temp = neurodata_type_args['data'][:, :, np.newaxis]
                    neurodata_type_args['data'] = np.moveaxis(
                        temp, [0, 1, 2], [0, 2, 1])
                    ts = neurodata_type_args.pop('timestamps')
                    starting_time = ts[0][0] if isinstance(
                        ts[0], np.ndarray) else ts[0]
                    neurodata_type_args.update(
                        dict(starting_time=np.float64(starting_time),
                             rate=1 / np.mean(np.diff(ts.squeeze())),
                             unit='sec'))
                    self.nwbfile.add_acquisition(
                        DecompositionSeries(**neurodata_type_args))
                elif neurodata_type_name == 'ElectricalSeries':
                    if not self.electrode_table_exist:
                        self.create_electrode_table_ecephys()
                    if neurodata_type_args['name'] in ['raw.lf', 'raw.ap']:
                        for probe_no in range(self.no_probes):
                            if neurodata_type_args['data'][probe_no].shape[
                                    1] > self._one_data.data_attrs_dump[
                                        'electrode_table_length'][probe_no]:
                                if 'channels.rawInd' in self._one_data.loaded_datasets:
                                    channel_idx = self._one_data.loaded_datasets[
                                        'channels.rawInd'][
                                            probe_no].data.astype('int')
                                else:
                                    warnings.warn(
                                        'could not find channels.rawInd')
                                    break
                            else:
                                channel_idx = slice(None)
                            self.nwbfile.add_acquisition(
                                ElectricalSeries(
                                    name=neurodata_type_args['name'] + '_' +
                                    self.nwb_metadata['Probes'][probe_no]
                                    ['name'],
                                    starting_time=np.abs(
                                        np.round(
                                            neurodata_type_args['timestamps']
                                            [probe_no][0, 1], 2)
                                    ),  # round starting times of the order of 1e-5
                                    rate=neurodata_type_args['data']
                                    [probe_no].fs,
                                    data=H5DataIO(
                                        DataChunkIterator(
                                            _iter_datasetview(
                                                neurodata_type_args['data']
                                                [probe_no],
                                                channel_ids=channel_idx),
                                            buffer_size=self.buffer_size),
                                        compression=True,
                                        shuffle=self.shuffle,
                                        compression_opts=self.complevel),
                                    electrodes=self.probe_dt_region[probe_no],
                                    channel_conversion=neurodata_type_args[
                                        'data']
                                    [probe_no].channel_conversion_sample2v[
                                        neurodata_type_args['data']
                                        [probe_no].type][channel_idx]))
                    elif neurodata_type_args['name'] in ['raw.nidq']:
                        self.nwbfile.add_acquisition(
                            ElectricalSeries(**neurodata_type_args))

    def create_probes(self):
        """
        Fills in all the probes metadata into the custom NeuroPixels extension.
        """
        for i in self.nwb_metadata['Probes']:
            self.nwbfile.add_device(IblProbes(**i))

    def create_iblsubject(self):
        """
        Populates the custom subject extension for IBL mouse data
        """
        self.nwbfile.subject = IblSubject(**self.nwb_metadata['IBLSubject'])

    def create_lab_meta_data(self):
        """
        Populates the custom lab_meta_data extension for IBL sessions data
        """
        self.nwbfile.add_lab_meta_data(
            IblSessionData(**self.nwb_metadata['IBLSessionsData']))

    def create_trials(self):
        table_data = self._get_data(self.nwb_metadata['Trials'])
        required_fields = ['start_time', 'stop_time']
        required_data = [i for i in table_data if i['name'] in required_fields]
        optional_data = [
            i for i in table_data if i['name'] not in required_fields
        ]
        if len(required_fields) != len(required_data):
            warnings.warn(
                'could not find required datasets trials.start_time and trials.stop_time; '
                'skipping trials table')
            return
        for start_time, stop_time in zip(required_data[0]['data'][:, 0],
                                         required_data[1]['data'][:, 1]):
            self.nwbfile.add_trial(start_time=start_time, stop_time=stop_time)
        for op_data in optional_data:
            if op_data['data'].shape[0] == required_data[0]['data'].shape[0]:
                self.nwbfile.add_trial_column(
                    name=op_data['name'],
                    description=op_data['description'],
                    data=op_data['data'])
            else:
                warnings.warn(
                    f'shape of trials.{op_data["name"]} does not match other trials.* datasets'
                )

    def _get_data(self, sub_metadata):
        """
        Uses OneData class to query ONE datasets on server and download them locally
        Parameters
        ----------
        sub_metadata: [list, dict]
            list of metadata dicts, each with a 'data' key whose value is a dataset-type string
            (npy, tsv, etc.) used to retrieve the data

        Returns
        -------
        out_dict_trim: list
            list of dicts with the actual data loaded in the 'data' field
        """
        include_idx = []
        out_dict_trim = []
        alt_datatypes = ['bands', 'power', 'frequencies', 'timestamps']
        if isinstance(sub_metadata, list):
            out_dict = deepcopy(sub_metadata)
        elif isinstance(sub_metadata, dict):
            out_dict = deepcopy(list(sub_metadata))
        else:
            return []
        req_datatypes = ['data']
        for count, neurodata_type_args in enumerate(out_dict):
            for alt_names in alt_datatypes:
                if neurodata_type_args.get(
                        alt_names
                ):  # in case of DecompositionSeries, Spectrum
                    neurodata_type_args[
                        alt_names] = self._one_data.download_dataset(
                            neurodata_type_args[alt_names],
                            neurodata_type_args['name'])
                    req_datatypes.append(alt_names)
            if neurodata_type_args[
                    'name'] == 'id':  # valid in case of units table.
                neurodata_type_args['data'] = self._one_data.download_dataset(
                    neurodata_type_args['data'], 'cluster_id')
            else:
                out_dict[count]['data'] = self._one_data.download_dataset(
                    neurodata_type_args['data'], neurodata_type_args['name'])
            if all([out_dict[count][i] is not None for i in req_datatypes]):
                include_idx.extend([count])
        out_dict_trim.extend([out_dict[j0] for j0 in include_idx])
        return out_dict_trim

    def run_conversion(self):
        """
        Single method to create all datasets and metadata in nwbfile in one go
        Returns
        -------

        """
        execute_list = [
            self.create_stimulus, self.create_trials,
            self.create_electrode_table_ecephys,
            self.create_timeseries_ecephys, self.create_units,
            self.create_behavior, self.create_probes, self.create_iblsubject,
            self.create_lab_meta_data, self.create_acquisition
        ]
        t = tqdm(execute_list)
        for i in t:
            t.set_postfix(current='creating nwb ' + i.__name__.split('_')[-1])
            i()
        print('done converting')

    def write_nwb(self, read_check=True):
        """
        After run_conversion(), writes the loaded nwbfile to disk.
        Parameters
        ----------
        read_check: bool
            read the file back after writing, as a round-trip verification
        """
        print('Saving to file, please wait...')
        with NWBHDF5IO(self.saveloc, 'w') as io:
            io.write(self.nwbfile)
            print('File successfully saved at: ', str(self.saveloc))

        if read_check:
            with NWBHDF5IO(self.saveloc, 'r') as io:
                io.read()
                print('Read check: OK')
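A hypothetical end-to-end use of the converter; the Alyx2NWBMetadata constructor arguments and the eid value are assumptions, not taken from the snippet:

# build metadata from an Alyx session id, then convert and write
one = ONE()
metadata_obj = Alyx2NWBMetadata(eid='some-alyx-session-eid', one_obj=one)  # hypothetical signature
converter = Alyx2NWBConverter(saveloc='ibl_session.nwb',
                              metadata_obj=metadata_obj)
converter.run_conversion()
converter.write_nwb()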
Example #15
def conversion_function(source_paths,
                        f_nwb,
                        metadata,
                        add_raw=False,
                        add_processed=True,
                        add_behavior=True,
                        plot_rois=False):
    """
    Copy data stored in a set of .npz files to a single NWB file.

    Parameters
    ----------
    source_paths : dict
        Dictionary with paths to source files/directories. e.g.:
        {'raw_data': {'type': 'file', 'path': ''},
         'raw_info': {'type': 'file', 'path': ''},
         'processed_data': {'type': 'file', 'path': ''},
         'sparse_matrix': {'type': 'file', 'path': ''},
         'ref_image': {'type': 'file', 'path': ''}}
    f_nwb : str
        Path to output NWB file, e.g. 'my_file.nwb'.
    metadata : dict
        Metadata dictionary
    add_raw : bool
        Whether to convert raw data or not.
    add_processed : bool
        Whether to convert processed data or not.
    add_behavior : bool
        Whether to convert behavior data or not.
    plot_rois : bool
        Plot ROIs
    """

    # Source files
    file_raw = None
    file_info = None
    file_processed = None
    file_sparse_matrix = None
    file_reference_image = None
    for k, v in source_paths.items():
        if source_paths[k]['path'] != '':
            fname = source_paths[k]['path']
            if k == 'raw_data':
                file_raw = h5py.File(fname, 'r')
            if k == 'raw_info':
                file_info = scipy.io.loadmat(fname,
                                             struct_as_record=False,
                                             squeeze_me=True)
            if k == 'processed_data':
                file_processed = np.load(fname)
            if k == 'sparse_matrix':
                file_sparse_matrix = np.load(fname)
            if k == 'ref_image':
                file_reference_image = np.load(fname)

    # Initialize a NWB object
    nwb = NWBFile(**metadata['NWBFile'])

    # Create and add device
    device = Device(name=metadata['Ophys']['Device'][0]['name'])
    nwb.add_device(device)

    # Creates one Imaging Plane for each channel
    fs = 1. / (file_processed['time'][0][1] - file_processed['time'][0][0])
    for meta_ip in metadata['Ophys']['ImagingPlane']:
        # Optical channel
        opt_ch = OpticalChannel(
            name=meta_ip['optical_channel'][0]['name'],
            description=meta_ip['optical_channel'][0]['description'],
            emission_lambda=meta_ip['optical_channel'][0]['emission_lambda'])
        nwb.create_imaging_plane(
            name=meta_ip['name'],
            optical_channel=opt_ch,
            description=meta_ip['description'],
            device=device,
            excitation_lambda=meta_ip['excitation_lambda'],
            imaging_rate=fs,
            indicator=meta_ip['indicator'],
            location=meta_ip['location'],
        )

    # Raw optical data
    if add_raw:
        print('Adding raw data...')
        for meta_tps in metadata['Ophys']['TwoPhotonSeries']:
            if meta_tps['name'][-1] == 'R':
                raw_data = file_raw['R']
            else:
                raw_data = file_raw['Y']

            def data_gen(data):
                xl, yl, zl, tl = data.shape
                chunk = 0
                while chunk < tl:
                    val = data[:, :, :, chunk]
                    chunk += 1
                    print('adding data chunk: ', chunk)
                    yield val

            xl, yl, zl, tl = raw_data.shape
            tps_data = DataChunkIterator(data=data_gen(data=raw_data),
                                         iter_axis=0,
                                         maxshape=(tl, xl, yl, zl))

            # Change dimensions from (X,Y,Z,T) in mat file to (T,X,Y,Z) nwb standard
            #raw_data = np.moveaxis(raw_data, -1, 0)

            tps = TwoPhotonSeries(
                name=meta_tps['name'],
                imaging_plane=nwb.imaging_planes[meta_tps['imaging_plane']],
                data=tps_data,
                rate=file_info['info'].daq.scanRate)
            nwb.add_acquisition(tps)

    # Processed data
    if add_processed:
        print('Adding processed data...')
        ophys_module = ProcessingModule(
            name='Ophys',
            description='contains optical physiology processed data.',
        )
        nwb.add_processing_module(ophys_module)

        # Create Image Segmentation compartment
        img_seg = ImageSegmentation(
            name=metadata['Ophys']['ImageSegmentation']['name'])
        ophys_module.add(img_seg)

        # Create plane segmentation and add ROIs
        meta_ps = metadata['Ophys']['ImageSegmentation'][
            'plane_segmentations'][0]
        ps = img_seg.create_plane_segmentation(
            name=meta_ps['name'],
            description=meta_ps['description'],
            imaging_plane=nwb.imaging_planes[meta_ps['imaging_plane']],
        )

        # Add ROIs
        indices = file_sparse_matrix['indices']
        indptr = file_sparse_matrix['indptr']
        dims = np.squeeze(file_processed['dims'])
        for start, stop in zip(indptr, indptr[1:]):
            voxel_mask = make_voxel_mask(indices[start:stop], dims)
            ps.add_roi(voxel_mask=voxel_mask)

        # Visualize 3D voxel masks
        if plot_rois:
            plot_rois_function(plane_segmentation=ps, indptr=indptr)

        # DFF measures
        dff = DfOverF(name=metadata['Ophys']['DfOverF']['name'])
        ophys_module.add(dff)

        # create ROI regions
        n_cells = file_processed['dFF'].shape[0]
        roi_region = ps.create_roi_table_region(description='RoiTableRegion',
                                                region=list(range(n_cells)))

        # create ROI response series
        dff_data = file_processed['dFF']
        tt = file_processed['time'].ravel()
        meta_rrs = metadata['Ophys']['DfOverF']['roi_response_series'][0]
        meta_rrs['data'] = dff_data.T
        meta_rrs['rois'] = roi_region
        meta_rrs['timestamps'] = tt
        dff.create_roi_response_series(**meta_rrs)

        # Creates GrayscaleVolume containers and add a reference image
        grayscale_volume = GrayscaleVolume(
            name=metadata['Ophys']['GrayscaleVolume']['name'],
            data=file_reference_image['im'])
        ophys_module.add(grayscale_volume)

    # Behavior data
    if add_behavior:
        print('Adding behavior data...')
        # Ball motion
        behavior_mod = nwb.create_processing_module(
            name='Behavior',
            description='holds processed behavior data',
        )
        meta_ts = metadata['Behavior']['TimeSeries'][0]
        meta_ts['data'] = file_processed['ball'].ravel()
        tt = file_processed['time'].ravel()
        meta_ts['timestamps'] = tt
        behavior_ts = TimeSeries(**meta_ts)
        behavior_mod.add(behavior_ts)

        # Rearrange the spatial data from body-point position tracking
        pos = file_processed['dlc']
        n_points = 8
        pos_reshaped = pos.reshape(
            (-1, n_points, 3))  # dims=(nSamples,n_points,3)

        # Create a Position object and add one SpatialSeries per body point
        position = Position()
        for i in range(n_points):
            position.create_spatial_series(
                name='SpatialSeries_' + str(i),
                data=pos_reshaped[:, i, :],
                timestamps=tt,
                reference_frame='Description defining what the zero-position is.',
                conversion=np.nan)
        behavior_mod.add(position)

    # Trial times: trial boundaries are the samples where trialFlag changes
    tt = file_processed['time'].ravel()
    trialFlag = file_processed['trialFlag'].ravel()
    trial_inds = np.hstack(
        (0, np.where(np.diff(trialFlag))[0], trialFlag.shape[0] - 1))
    trial_times = tt[trial_inds]

    for start, stop in zip(trial_times, trial_times[1:]):
        nwb.add_trial(start_time=start, stop_time=stop)

    # Saves to NWB file
    with NWBHDF5IO(f_nwb, mode='w') as io:
        io.write(nwb)
    print('NWB file saved with size: ', os.stat(f_nwb).st_size / 1e6, ' MB')
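
The generator-plus-DataChunkIterator pattern above is the reusable piece of this script. Below is a minimal sketch of it on a toy array; the toy shapes and the hdmf import path are assumptions (older pynwb releases exposed DataChunkIterator from a different module):

import numpy as np
from hdmf.data_utils import DataChunkIterator

toy = np.random.randn(4, 4, 2, 10)  # (X, Y, Z, T), as stored in the mat file

def toy_gen(data):
    # yield one (X, Y, Z) volume per time point
    for t in range(data.shape[-1]):
        yield data[:, :, :, t]

xl, yl, zl, tl = toy.shape
toy_iter = DataChunkIterator(data=toy_gen(toy),
                             iter_axis=0,
                             maxshape=(tl, xl, yl, zl))
# toy_iter can now be passed as data= to TwoPhotonSeries; the dataset is
# written in (T, X, Y, Z) order without materializing the transpose in memory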
Example #16
    def save(self,
             file_name,
             to32=True,
             order='F',
             imagej=False,
             bigtiff=True,
             excitation_lambda=488.0,
             compress=0,
             q_max=99.75,
             q_min=1,
             var_name_hdf5='mov',
             sess_desc='some_description',
             identifier='some identifier',
             imaging_plane_description='some imaging plane description',
             emission_lambda=520.0,
             indicator='OGB-1',
             location='brain',
             starting_time=0.,
             experimenter='Dr Who',
             lab_name=None,
             institution=None,
             experiment_description='Experiment Description',
             session_id='Session ID'):
        """
        Save the timeseries in single precision. Supported formats include
        TIFF, NPZ, AVI, MAT, HDF5/H5, MMAP, and NWB

        Args:
            file_name: str
                name of file. Possible formats are tif, avi, npz, mat, mmap,
                hdf5 and nwb

            to32: Bool
                whether to transform to 32 bits

            order: 'F' or 'C'
                C or Fortran order

            var_name_hdf5: str
                name of the dataset inside the hdf5 file (also used as the
                TwoPhotonSeries name when saving to nwb)

            q_max, q_min: float in [0, 100]
                percentile for maximum/minimum clipping value if saving as avi
                (If set to None, no automatic scaling to the dynamic range [0, 255] is performed)

            the remaining keyword arguments (excitation_lambda, emission_lambda,
                indicator, location and the session/experiment metadata) are
                only used when saving to nwb

        Raises:
            Exception 'Extension Unknown'

        """
        name, extension = os.path.splitext(file_name)
        extension = extension.lower()
        logging.debug("Parsing extension " + str(extension))

        if extension in ['.tif', '.tiff', '.btf']:
            with tifffile.TiffWriter(file_name, bigtiff=bigtiff,
                                     imagej=imagej) as tif:
                for i in range(self.shape[0]):
                    if i % 200 == 0:
                        logging.debug(str(i) + ' frames saved')

                    curfr = self[i].copy()
                    if to32 and not ('float32' in str(self.dtype)):
                        curfr = curfr.astype(np.float32)
                    tif.save(curfr, compress=compress)
        elif extension == '.npz':
            if to32 and not ('float32' in str(self.dtype)):
                input_arr = self.astype(np.float32)
            else:
                input_arr = np.array(self)

            np.savez(file_name,
                     input_arr=input_arr,
                     start_time=self.start_time,
                     fr=self.fr,
                     meta_data=self.meta_data,
                     file_name=self.file_name)
        elif extension == '.avi':
            codec = None
            try:
                codec = cv2.FOURCC('I', 'Y', 'U', 'V')
            except AttributeError:
                codec = cv2.VideoWriter_fourcc(*'IYUV')
            if q_max is None or q_min is None:
                data = self.astype(np.uint8)
            else:
                if q_max < 100:
                    maxmov = np.nanpercentile(self[::max(1,
                                                         len(self) // 100)],
                                              q_max)
                else:
                    maxmov = np.nanmax(self)
                if q_min > 0:
                    minmov = np.nanpercentile(self[::max(1,
                                                         len(self) // 100)],
                                              q_min)
                else:
                    minmov = np.nanmin(self)
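                # linearly rescale to [0, 255] using the percentile limits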
                data = 255 * (self - minmov) / (maxmov - minmov)
                np.clip(data, 0, 255, data)
                data = data.astype(np.uint8)

            y, x = data[0].shape
            vw = cv2.VideoWriter(file_name,
                                 codec,
                                 self.fr, (x, y),
                                 isColor=True)
            for d in data:
                vw.write(cv2.cvtColor(d, cv2.COLOR_GRAY2BGR))
            vw.release()

        elif extension == '.mat':
            if self.file_name[0] is not None:
                f_name = self.file_name
            else:
                f_name = ''

            if to32 and not ('float32' in str(self.dtype)):
                input_arr = self.astype(np.float32)
            else:
                input_arr = np.array(self)

            if self.meta_data[0] is None:
                savemat(
                    file_name, {
                        'input_arr': np.rollaxis(input_arr, axis=0, start=3),
                        'start_time': self.start_time,
                        'fr': self.fr,
                        'meta_data': [],
                        'file_name': f_name
                    })
            else:
                savemat(
                    file_name, {
                        'input_arr': np.rollaxis(input_arr, axis=0, start=3),
                        'start_time': self.start_time,
                        'fr': self.fr,
                        'meta_data': self.meta_data,
                        'file_name': f_name
                    })

        elif extension in ('.hdf5', '.h5'):
            with h5py.File(file_name, "w") as f:
                if to32 and not ('float32' in str(self.dtype)):
                    input_arr = self.astype(np.float32)
                else:
                    input_arr = np.array(self)

                dset = f.create_dataset(var_name_hdf5, data=input_arr)
                dset.attrs["fr"] = self.fr
                dset.attrs["start_time"] = self.start_time
                try:
                    dset.attrs["file_name"] = [
                        a.encode('utf8') for a in self.file_name
                    ]
                except Exception:
                    logging.warning('Could not store the file_name attribute')
                if self.meta_data[0] is not None:
                    logging.debug("Metadata for saved file: " +
                                  str(self.meta_data))
                    dset.attrs["meta_data"] = cpk.dumps(self.meta_data)
        elif extension == '.mmap':
            base_name = name

            T = self.shape[0]
            dims = self.shape[1:]
            if to32 and not ('float32' in str(self.dtype)):
                input_arr = self.astype(np.float32)
            else:
                input_arr = np.array(self)

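            # move time to the last axis and flatten space so the memmap holds
            # a (n_pixels, T) Fortran-ordered array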
            input_arr = np.transpose(input_arr,
                                     list(range(1,
                                                len(dims) + 1)) + [0])
            input_arr = np.reshape(input_arr, (np.prod(dims), T), order='F')

            fname_tot = memmap_frames_filename(base_name, dims, T, order)
            fname_tot = os.path.join(os.path.split(file_name)[0], fname_tot)
            big_mov = np.memmap(fname_tot,
                                mode='w+',
                                dtype=np.float32,
                                shape=(np.uint64(np.prod(dims)), np.uint64(T)),
                                order=order)

            big_mov[:] = np.asarray(input_arr, dtype=np.float32)
            big_mov.flush()
            del big_mov, input_arr
            return fname_tot
        elif extension == '.nwb':
            if to32 and not ('float32' in str(self.dtype)):
                input_arr = self.astype(np.float32)
            else:
                input_arr = np.array(self)
            # Create NWB file
            nwbfile = NWBFile(sess_desc,
                              identifier,
                              datetime.now(tzlocal()),
                              experimenter=experimenter,
                              lab=lab_name,
                              institution=institution,
                              experiment_description=experiment_description,
                              session_id=session_id)
            # Get the device
            device = Device('imaging_device')
            nwbfile.add_device(device)
            # OpticalChannel
            optical_channel = OpticalChannel('OpticalChannel',
                                             'main optical channel',
                                             emission_lambda=emission_lambda)
            imaging_plane = nwbfile.create_imaging_plane(
                name='ImagingPlane',
                optical_channel=optical_channel,
                description=imaging_plane_description,
                device=device,
                excitation_lambda=excitation_lambda,
                imaging_rate=self.fr,
                indicator=indicator,
                location=location)
            # Images
            image_series = TwoPhotonSeries(name=var_name_hdf5,
                                           dimension=self.shape[1:],
                                           data=input_arr,
                                           imaging_plane=imaging_plane,
                                           starting_frame=[0],
                                           starting_time=starting_time,
                                           rate=self.fr)

            nwbfile.add_acquisition(image_series)

            with NWBHDF5IO(file_name, 'w') as io:
                io.write(nwbfile)

            return file_name

        else:
            logging.error("Extension " + str(extension) + " unknown")
            raise Exception('Extension Unknown')
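
For orientation, a hedged usage sketch of the nwb branch follows; the movie object m, its frame rate, and every argument value below are assumptions:

# hypothetical movie object `m` with its frame rate attribute `fr` set
out_path = m.save('example_session.nwb',
                  sess_desc='two-photon imaging session',
                  identifier='sess-001',
                  imaging_plane_description='example imaging plane',
                  indicator='GCaMP6f',
                  location='V1')
# the nwb branch writes the NWBFile and returns the file name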
Example #17
def nwb_copy_file(old_file, new_file, cp_objs=None, save_to_file=True):
    """
    Copy fields defined in 'cp_objs' from an existing NWB file to a new NWB file.

    Parameters
    ----------
    old_file : str, path, nwbfile
        String or path to nwb file '/path/to/old_file.nwb'. Alternatively, the
        nwbfile object.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
    save_to_file: Boolean
        If True, saves directly to new_file.nwb. If False, only returns nwb_new.

    Returns:
    --------
    nwb_new : nwbfile object
    """

    # avoid the mutable-default pitfall for cp_objs
    if cp_objs is None:
        cp_objs = {}

    manager = get_manager()

    # Get from nwbfile object in memory or from file
    if isinstance(old_file, NWBFile):
        nwb_old = old_file
        io1 = False
    else:
        io1 = NWBHDF5IO(str(old_file),
                        'r',
                        manager=manager,
                        load_namespaces=True)
        nwb_old = io1.read()

    # Creates new file
    nwb_new = NWBFile(
        session_description=str(nwb_old.session_description),
        identifier=id_generator(),
        session_start_time=nwb_old.session_start_time,
    )
    with NWBHDF5IO(new_file, mode='w', manager=manager,
                   load_namespaces=False) as io2:
        # Institution name ------------------------------------------------
        if 'institution' in cp_objs:
            nwb_new.institution = str(nwb_old.institution)

        # Lab name --------------------------------------------------------
        if 'lab' in cp_objs:
            nwb_new.lab = str(nwb_old.lab)

        # Session id ------------------------------------------------------
        if 'session' in cp_objs:
            nwb_new.session_id = nwb_old.session_id

        # Devices ---------------------------------------------------------
        if 'devices' in cp_objs:
            for aux in list(nwb_old.devices.keys()):
                dev = Device(nwb_old.devices[aux].name)
                nwb_new.add_device(dev)

        # Electrode groups ------------------------------------------------
        if 'electrode_groups' in cp_objs and nwb_old.electrode_groups is not None:
            for aux in list(nwb_old.electrode_groups.keys()):
                nwb_new.create_electrode_group(
                    name=str(nwb_old.electrode_groups[aux].name),
                    description=str(nwb_old.electrode_groups[aux].description),
                    location=str(nwb_old.electrode_groups[aux].location),
                    device=nwb_new.get_device(
                        nwb_old.electrode_groups[aux].device.name))

        # Electrodes ------------------------------------------------------
        if 'electrodes' in cp_objs and nwb_old.electrodes is not None:
            nElec = len(nwb_old.electrodes['x'].data[:])
            for aux in np.arange(nElec):
                nwb_new.add_electrode(
                    x=nwb_old.electrodes['x'][aux],
                    y=nwb_old.electrodes['y'][aux],
                    z=nwb_old.electrodes['z'][aux],
                    imp=nwb_old.electrodes['imp'][aux],
                    location=str(nwb_old.electrodes['location'][aux]),
                    filtering=str(nwb_old.electrodes['filtering'][aux]),
                    group=nwb_new.get_electrode_group(
                        nwb_old.electrodes['group'][aux].name),
                    group_name=str(nwb_old.electrodes['group_name'][aux]))
            # if there are custom variables
            new_vars = list(nwb_old.electrodes.colnames)
            default_vars = [
                'x', 'y', 'z', 'imp', 'location', 'filtering', 'group',
                'group_name'
            ]
            [new_vars.remove(var) for var in default_vars if var in new_vars]
            for var in new_vars:
                if var == 'label':
                    var_data = [
                        str(elem) for elem in nwb_old.electrodes[var].data[:]
                    ]
                else:
                    var_data = np.array(nwb_old.electrodes[var].data[:])

                nwb_new.add_electrode_column(
                    name=str(var),
                    description=str(nwb_old.electrodes[var].description),
                    data=var_data)

            # If Bipolar scheme for electrodes
            for v in nwb_old.lab_meta_data.values():
                if isinstance(v, EcephysExt) and hasattr(
                        v, 'bipolar_scheme_table'):
                    bst_old = v.bipolar_scheme_table
                    bst_new = BipolarSchemeTable(
                        name=bst_old.name, description=bst_old.description)
                    ecephys_ext = EcephysExt(name=v.name)
                    ecephys_ext.bipolar_scheme_table = bst_new
                    nwb_new.add_lab_meta_data(ecephys_ext)

        # Epochs ----------------------------------------------------------
        if 'epochs' in cp_objs and nwb_old.epochs is not None:
            nEpochs = len(nwb_old.epochs['start_time'].data[:])
            for i in np.arange(nEpochs):
                nwb_new.add_epoch(
                    start_time=nwb_old.epochs['start_time'].data[i],
                    stop_time=nwb_old.epochs['stop_time'].data[i])
            # if there are custom variables
            new_vars = list(nwb_old.epochs.colnames)
            default_vars = ['start_time', 'stop_time', 'tags', 'timeseries']
            [new_vars.remove(var) for var in default_vars if var in new_vars]
            for var in new_vars:
                nwb_new.add_epoch_column(
                    name=var,
                    description=nwb_old.epochs[var].description,
                    data=nwb_old.epochs[var].data[:])

        # Invalid times ---------------------------------------------------
        if 'invalid_times' in cp_objs and nwb_old.invalid_times is not None:
            nInvalid = len(nwb_old.invalid_times['start_time'][:])
            for aux in np.arange(nInvalid):
                nwb_new.add_invalid_time_interval(
                    start_time=nwb_old.invalid_times['start_time'][aux],
                    stop_time=nwb_old.invalid_times['stop_time'][aux])

        # Trials ----------------------------------------------------------
        if 'trials' in cp_objs and nwb_old.trials is not None:
            nTrials = len(nwb_old.trials['start_time'])
            for aux in np.arange(nTrials):
                nwb_new.add_trial(start_time=nwb_old.trials['start_time'][aux],
                                  stop_time=nwb_old.trials['stop_time'][aux])
            # if there are custom variables
            new_vars = list(nwb_old.trials.colnames)
            default_vars = ['start_time', 'stop_time']
            [new_vars.remove(var) for var in default_vars if var in new_vars]
            for var in new_vars:
                nwb_new.add_trial_column(
                    name=var,
                    description=nwb_old.trials[var].description,
                    data=nwb_old.trials[var].data[:])

        # Intervals -------------------------------------------------------
        if 'intervals' in cp_objs and nwb_old.intervals is not None:
            all_objs_names = list(nwb_old.intervals.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.intervals[obj_name]
                # create and add TimeIntervals
                obj = TimeIntervals(name=obj_old.name,
                                    description=obj_old.description)
                nInt = len(obj_old['start_time'])
                for ind in np.arange(nInt):
                    obj.add_interval(start_time=obj_old['start_time'][ind],
                                     stop_time=obj_old['stop_time'][ind])
                # Add to file
                nwb_new.add_time_intervals(obj)

        # Stimulus --------------------------------------------------------
        if 'stimulus' in cp_objs:
            all_objs_names = list(nwb_old.stimulus.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.stimulus[obj_name]
                obj = TimeSeries(name=obj_old.name,
                                 description=obj_old.description,
                                 data=obj_old.data[:],
                                 rate=obj_old.rate,
                                 resolution=obj_old.resolution,
                                 conversion=obj_old.conversion,
                                 starting_time=obj_old.starting_time,
                                 unit=obj_old.unit)
                nwb_new.add_stimulus(obj)

        # Processing modules ----------------------------------------------
        if 'ecephys' in cp_objs:
            interfaces = [
                nwb_old.processing['ecephys'].data_interfaces[key]
                for key in cp_objs['ecephys']
            ]
            # Add ecephys module to NWB file
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            nwb_new.add_processing_module(ecephys_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    ecephys_module.add_data_interface(obj)

        if 'behavior' in cp_objs:
            interfaces = [
                nwb_old.processing['behavior'].data_interfaces[key]
                for key in cp_objs['behavior']
            ]
            if 'behavior' not in nwb_new.processing:
                # Add behavior module to NWB file
                behavior_module = ProcessingModule(
                    name='behavior', description='behavioral data.')
                nwb_new.add_processing_module(behavior_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    behavior_module.add_data_interface(obj)

        # Acquisition -----------------------------------------------------
        # Can copy raw ElectricalSeries and microphone recordings
        if 'acquisition' in cp_objs:
            for acq_name in cp_objs['acquisition']:
                obj_old = nwb_old.acquisition[acq_name]
                acq = copy_obj(obj_old, nwb_old, nwb_new)
                nwb_new.add_acquisition(acq)

        # Surveys ---------------------------------------------------------
        if 'surveys' in cp_objs and 'behavior' in nwb_old.processing:
            surveys_list = [
                v for v in
                nwb_old.processing['behavior'].data_interfaces.values()
                if v.neurodata_type == 'SurveyTable'
            ]
            if cp_objs['surveys'] and len(surveys_list) > 0:
                if 'behavior' not in nwb_new.processing:
                    # Add behavior module to NWB file
                    behavior_module = ProcessingModule(
                        name='behavior', description='behavioral data.')
                    nwb_new.add_processing_module(behavior_module)
                for obj_old in surveys_list:
                    srv = copy_obj(obj_old, nwb_old, nwb_new)
                    behavior_module.add_data_interface(srv)

        # Subject ---------------------------------------------------------
        if nwb_old.subject is not None:
            if 'subject' in cp_objs:
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                except Exception:
                    nwb_new.subject = Subject(**nwb_old.subject.fields)

        # Write new file with copied fields
        if save_to_file:
            io2.write(nwb_new, link_data=False)

    # Close old file and return new nwbfile object
    if io1:
        io1.close()

    return nwb_new
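
A hedged usage sketch; the paths are placeholders and the cp_objs contents follow the format described in the docstring:

nwb_new = nwb_copy_file(
    old_file='/path/to/old_file.nwb',
    new_file='/path/to/new_file.nwb',
    cp_objs={'institution': True,
             'lab': True,
             'devices': True,
             'electrode_groups': True,
             'electrodes': True,
             'acquisition': ['microphone']},
    save_to_file=True)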