Esempio n. 1
0
def add_eye_gaze_mapping_data_to_nwbfile(nwbfile: pynwb.NWBFile,
                                         eye_gaze_data: dict) -> pynwb.NWBFile:
    """Attach the raw and filtered gaze mapping processing modules to an NWB file.

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        File to receive the gaze mapping modules.
    eye_gaze_data : dict
        Gaze mapping data used to build the two processing modules.

    Returns
    -------
    pynwb.NWBFile
        The same file object, with both modules added.
    """
    raw_mod, filtered_mod = create_gaze_mapping_nwb_processing_modules(eye_gaze_data)
    for gaze_module in (raw_mod, filtered_mod):
        nwbfile.add_processing_module(gaze_module)
    return nwbfile
Esempio n. 2
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write this object's eye tracking rig metadata into *nwbfile*.

        Builds an ``ndx-aibs-behavior-ophys`` extension container from the
        rig geometry fields held on this object, wraps it in an
        ``eye_tracking_rig_metadata`` processing module, and attaches the
        module to the file.
        """
        rig_metadata_type = load_pynwb_extension(
            OphysEyeTrackingRigMetadataSchema, 'ndx-aibs-behavior-ophys')

        # Positions are reported in millimeters, rotations in degrees.
        rig_metadata = rig_metadata_type(
            name="eye_tracking_rig_metadata",
            equipment=self._equipment,
            monitor_position=list(self._monitor_position_mm),
            monitor_position__unit_of_measurement="mm",
            camera_position=list(self._camera_position_mm),
            camera_position__unit_of_measurement="mm",
            led_position=list(self._led_position),
            led_position__unit_of_measurement="mm",
            monitor_rotation=list(self._monitor_rotation_deg),
            monitor_rotation__unit_of_measurement="deg",
            camera_rotation=list(self._camera_rotation_deg),
            camera_rotation__unit_of_measurement="deg")

        rig_module = pynwb.ProcessingModule(
            name='eye_tracking_rig_metadata',
            description='Eye tracking rig metadata module')
        rig_module.add_data_interface(rig_metadata)
        nwbfile.add_processing_module(rig_module)
        return nwbfile
Esempio n. 3
0
    def add_eye_tracking_rig_geometry_data_to_nwbfile(
            nwbfile: NWBFile, eye_tracking_rig_geometry: dict) -> NWBFile:
        """Attach eye tracking rig geometry metadata to *nwbfile*.

        Rig geometry dict should consist of the following fields:
        monitor_position_mm: [x, y, z]
        monitor_rotation_deg: [x, y, z]
        camera_position_mm: [x, y, z]
        camera_rotation_deg: [x, y, z]
        led_position: [x, y, z]
        equipment: A string describing rig
        """
        rig_geometry = eye_tracking_rig_geometry

        metadata_type = load_pynwb_extension(
            OphysEyeTrackingRigMetadataSchema, 'ndx-aibs-behavior-ophys')

        # Positions are expressed in millimeters and rotations in degrees.
        rig_metadata = metadata_type(
            name="eye_tracking_rig_metadata",
            equipment=rig_geometry['equipment'],
            monitor_position=rig_geometry['monitor_position_mm'],
            monitor_position__unit_of_measurement="mm",
            camera_position=rig_geometry['camera_position_mm'],
            camera_position__unit_of_measurement="mm",
            led_position=rig_geometry['led_position'],
            led_position__unit_of_measurement="mm",
            monitor_rotation=rig_geometry['monitor_rotation_deg'],
            monitor_rotation__unit_of_measurement="deg",
            camera_rotation=rig_geometry['camera_rotation_deg'],
            camera_rotation__unit_of_measurement="deg")

        rig_module = pynwb.ProcessingModule(
            name='eye_tracking_rig_metadata',
            description='Eye tracking rig metadata module')
        rig_module.add_data_interface(rig_metadata)
        nwbfile.add_processing_module(rig_module)

        return nwbfile
Esempio n. 4
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write reward data to *nwbfile* as a 'rewards' processing module.

        Adds two TimeSeries sharing one timestamp vector: 'volume' (reward
        volume in mL) and 'autorewarded' (whether each reward was automatic).
        If there is no rewards data (expected for passive sessions), the file
        is returned unchanged.
        """
        # If there is no rewards data, do not
        # write anything to the NWB file (this
        # is expected for passive sessions)
        if len(self.value['timestamps']) == 0:
            return nwbfile

        reward_volume_ts = TimeSeries(
            name='volume',
            data=self.value['volume'].values,
            timestamps=self.value['timestamps'].values,
            unit='mL'
        )

        # Reuse the volume timestamps so both series stay aligned.
        # NOTE(review): 'mL' is carried over from the volume series even
        # though this is a flag series — confirm whether 'N/A' is intended.
        autorewarded_ts = TimeSeries(
            name='autorewarded',
            data=self.value['autorewarded'].values,
            timestamps=reward_volume_ts.timestamps,
            unit='mL'
        )

        # Fixed copy-paste bug: the description previously read
        # 'Licking behavior processing module' for the rewards module.
        rewards_mod = ProcessingModule('rewards',
                                       'Rewards processing module')
        rewards_mod.add_data_interface(reward_volume_ts)
        rewards_mod.add_data_interface(autorewarded_ts)
        nwbfile.add_processing_module(rewards_mod)

        return nwbfile
Esempio n. 5
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write running speed to *nwbfile* under the 'running' module.

        The series is named 'speed' when this object holds filtered data and
        'speed_unfiltered' otherwise. The 'running' processing module is
        created on first use and reused when it already exists.
        """
        speed_frame: pd.DataFrame = self.value

        series_name = "speed" if self._filtered else "speed_unfiltered"
        speed_series = TimeSeries(name=series_name,
                                  data=speed_frame['speed'].values,
                                  timestamps=speed_frame['timestamps'].values,
                                  unit='cm/s')

        if 'running' not in nwbfile.processing:
            running_mod = ProcessingModule('running',
                                           'Running speed processing module')
            nwbfile.add_processing_module(running_mod)
        else:
            running_mod = nwbfile.processing['running']

        running_mod.add_data_interface(speed_series)

        return nwbfile
Esempio n. 6
0
def add_eye_tracking_ellipse_fit_data_to_nwbfile(nwbfile: pynwb.NWBFile,
                                                 eye_dlc_tracking_data: dict,
                                                 synced_timestamps: pd.Series) -> pynwb.NWBFile:
    """Add eye tracking ellipse fit data to *nwbfile*.

    Builds an eye tracking processing module from the DLC tracking data and
    the synchronized timestamps, attaches it to the file, and returns the
    file.
    """
    tracking_module = create_eye_tracking_nwb_processing_module(
        eye_dlc_tracking_data, synced_timestamps)
    nwbfile.add_processing_module(tracking_module)
    return nwbfile
Esempio n. 7
0
    def test_task_creator_create_task_and_write_to_nwb_successfully(self):
        """Round-trip two TaskCreator tasks through an NWB file on disk.

        Builds an NWBFile containing a processing module with two tasks
        created from mocked FlTask objects, writes it to 'task.nwb', reads it
        back, and checks the read containers equal the originals.
        """
        nwb_content = NWBFile(session_description='demonstrate external files',
                              identifier='NWBE1',
                              session_start_time=datetime(2017,
                                                          4,
                                                          3,
                                                          11,
                                                          tzinfo=tzlocal()),
                              file_create_date=datetime(2017,
                                                        4,
                                                        15,
                                                        12,
                                                        tzinfo=tzlocal()))
        processing_module = ProcessingModule('pm', 'none')

        mock_fl_task_0 = Mock(spec=FlTask)
        mock_fl_task_0.name = 'task_0'
        mock_fl_task_0.description = ''
        mock_fl_task_0.columns = [
            VectorData(name='task_name', description='', data=['Sleep']),
            VectorData(name='task_description',
                       description='',
                       data=['The animal sleeps in a small empty box.']),
            VectorData(name='camera_id', description='', data=[[0]]),
            VectorData(name='task_epochs', description='', data=[[1, 3, 5]]),
        ]
        mock_fl_task_1 = Mock(spec=FlTask)
        mock_fl_task_1.name = 'task_1'
        mock_fl_task_1.description = ''
        mock_fl_task_1.columns = [
            VectorData(name='task_name', description='', data=['Stem+Leaf']),
            VectorData(name='task_description',
                       description='',
                       data=['Spatial Bandit']),
            VectorData(name='camera_id', description='', data=[[1, 2]]),
            VectorData(name='task_epochs', description='', data=[[2, 4]]),
        ]

        task_0 = TaskCreator.create(mock_fl_task_0)
        task_1 = TaskCreator.create(mock_fl_task_1)

        processing_module.add(task_0)
        processing_module.add(task_1)
        nwb_content.add_processing_module(processing_module)

        # The 'with' block closes the IO handle on exit; the previous explicit
        # close() inside the block was redundant and has been removed.
        with NWBHDF5IO(path='task.nwb', mode='w') as nwb_file_io:
            nwb_file_io.write(nwb_content)

        with NWBHDF5IO(path='task.nwb', mode='r') as nwb_file_io:
            nwb_content = nwb_file_io.read()
            self.assertContainerEqual(
                nwb_content.processing['pm'].data_interfaces['task_0'], task_0)
            self.assertContainerEqual(
                nwb_content.processing['pm'].data_interfaces['task_1'], task_1)

        os.remove('task.nwb')
Esempio n. 8
0
def add_csd_to_nwbfile(nwbfile: pynwb.NWBFile,
                       csd: np.ndarray,
                       times: np.ndarray,
                       csd_virt_channel_locs: np.ndarray,
                       csd_unit="V/cm^2",
                       position_unit="um") -> pynwb.NWBFile:
    """Add current source density (CSD) data to an nwbfile

    Parameters
    ----------
    nwbfile : pynwb.NWBFile
        nwbfile to add CSD data to
    csd : np.ndarray
        CSD data in the form of: (channels x timepoints)
    times : np.ndarray
        Timestamps for CSD data (timepoints)
    csd_virt_channel_locs : np.ndarray
        Location of interpolated channels
    csd_unit : str, optional
        Units of CSD data, by default "V/cm^2"
    position_unit : str, optional
        Units of virtual channel locations, by default "um" (micrometer)

    Returns
    -------
    pynwb.NWBFile
        nwbfile which has had CSD data added
    """
    csd_mod = pynwb.ProcessingModule(
        "current_source_density",
        "Precalculated current source density from interpolated channel locations."
    )
    nwbfile.add_processing_module(csd_mod)

    # TimeSeries expects data in (timepoints x channels) format, so the
    # (channels x timepoints) input is transposed here.
    csd_ts = pynwb.base.TimeSeries(
        name="current_source_density",
        data=csd.T,
        timestamps=times,
        unit=csd_unit)

    # Split the (n, 2) location array into separate x and y columns.
    x_locs, y_locs = np.split(csd_virt_channel_locs.astype(np.uint64),
                              2,
                              axis=1)

    # Renamed from 'csd' so the container no longer shadows the input array.
    ecephys_csd = EcephysCSD(name="ecephys_csd",
                             time_series=csd_ts,
                             virtual_electrode_x_positions=x_locs.flatten(),
                             virtual_electrode_x_positions__unit=position_unit,
                             virtual_electrode_y_positions=y_locs.flatten(),
                             virtual_electrode_y_positions__unit=position_unit)

    csd_mod.add_data_interface(ecephys_csd)

    return nwbfile
Esempio n. 9
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Store stimulus timestamps in a 'stimulus' processing module.

        The timestamp vector is used both as the data and as the timestamps
        of the TimeSeries, so each sample's value is its own time in seconds.
        """
        timestamps_series = TimeSeries(data=self._value,
                                       name="timestamps",
                                       timestamps=self._value,
                                       unit="s")

        stimulus_module = ProcessingModule("stimulus",
                                           "Stimulus Times processing")
        stimulus_module.add_data_interface(timestamps_series)
        nwbfile.add_processing_module(stimulus_module)

        return nwbfile
Esempio n. 10
0
    def test_processing_module(self):
        """Render a behavior processing module through the default vis spec."""
        session_start = datetime(2020, 1, 29, 11, tzinfo=tzlocal())
        nwbfile = NWBFile(session_description='Test Session',
                          identifier='NWBPM',
                          session_start_time=session_start)

        nwbfile.add_processing_module(
            ProcessingModule(name='behavior',
                             description='preprocessed behavioral data'))

        nwbfile.processing['behavior'].add(self.position)

        processing_module(nwbfile.processing['behavior'],
                          default_neurodata_vis_spec)
Esempio n. 11
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write lick events to *nwbfile* in a 'licking' processing module.

        Stores stimulus presentation frame indices as data with the
        corresponding lick timestamps. If there is no lick data (expected for
        passive sessions), the file is returned unchanged.
        """
        # Guard against empty lick data: building a TimeSeries from empty
        # arrays is not useful, and the other lick writer in this codebase
        # skips writing in that case.
        if len(self.value['frame']) == 0:
            return nwbfile

        lick_timeseries = TimeSeries(
            name='licks',
            data=self.value['frame'].values,
            timestamps=self.value['timestamps'].values,
            description=('Timestamps and stimulus presentation '
                         'frame indices for lick events'),
            unit='N/A')

        # Add lick interface to nwb file, by way of a processing module:
        licks_mod = ProcessingModule('licking',
                                     'Licking behavior processing module')
        licks_mod.add_data_interface(lick_timeseries)
        nwbfile.add_processing_module(licks_mod)

        return nwbfile
Esempio n. 12
0
    def test_processing_module(self):
        """Visualize a freshly-built behavior module with the default spec."""
        nwbfile = NWBFile(
            session_description="Test Session",
            identifier="NWBPM",
            session_start_time=datetime(2020, 1, 29, 11, tzinfo=tzlocal()),
        )

        module = ProcessingModule(
            name="behavior", description="preprocessed behavioral data")
        nwbfile.add_processing_module(module)
        module.add(self.position)

        processing_module(nwbfile.processing["behavior"],
                          default_neurodata_vis_spec)
Esempio n. 13
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write reward data to *nwbfile* as a 'rewards' processing module.

        Adds a 'volume' TimeSeries (mL) and an 'autorewarded' TimeSeries that
        shares its timestamps. If there is no rewards data (expected for
        passive sessions), the file is returned unchanged.
        """
        # Skip writing when there are no rewards, mirroring the guard used by
        # the other rewards writer in this codebase.
        if len(self.value['timestamps']) == 0:
            return nwbfile

        reward_volume_ts = TimeSeries(
            name='volume',
            data=self.value['volume'].values,
            timestamps=self.value['timestamps'].values,
            unit='mL')

        # Share the volume timestamps so the two series stay aligned.
        autorewarded_ts = TimeSeries(name='autorewarded',
                                     data=self.value['autorewarded'].values,
                                     timestamps=reward_volume_ts.timestamps,
                                     unit='mL')

        # Fixed copy-paste bug: the description previously read
        # 'Licking behavior processing module' for the rewards module.
        rewards_mod = ProcessingModule('rewards',
                                       'Rewards processing module')
        rewards_mod.add_data_interface(reward_volume_ts)
        rewards_mod.add_data_interface(autorewarded_ts)
        nwbfile.add_processing_module(rewards_mod)

        return nwbfile
Esempio n. 14
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write raw running-wheel acquisition data to *nwbfile*.

        The angular change series ('dx') goes into the 'running' processing
        module, while the raw encoder voltages ('v_sig', 'v_in') are stored
        as acquisition time series. All three use the dataframe's index as
        their timestamps.
        """
        acquisition_df: pd.DataFrame = self.value
        shared_timestamps = acquisition_df.index.values

        dx_series = TimeSeries(
            name='dx',
            data=acquisition_df['dx'].values,
            timestamps=shared_timestamps,
            unit='cm',
            description=(
                'Running wheel angular change, computed during data collection'
            ))
        encoder_signal = TimeSeries(
            name='v_sig',
            data=acquisition_df['v_sig'].values,
            timestamps=shared_timestamps,
            unit='V',
            description='Voltage signal from the running wheel encoder')
        encoder_max = TimeSeries(
            name='v_in',
            data=acquisition_df['v_in'].values,
            timestamps=shared_timestamps,
            unit='V',
            description=(
                'The theoretical maximum voltage that the running wheel '
                'encoder will reach prior to "wrapping". This should '
                'theoretically be 5V (after crossing 5V goes to 0V, or '
                'vice versa). In practice the encoder does not always '
                'reach this value before wrapping, which can cause '
                'transient spikes in speed at the voltage "wraps".'))

        # Reuse the 'running' module if a previous writer already created it.
        if 'running' not in nwbfile.processing:
            running_mod = ProcessingModule('running',
                                           'Running speed processing module')
            nwbfile.add_processing_module(running_mod)
        else:
            running_mod = nwbfile.processing['running']

        running_mod.add_data_interface(dx_series)
        nwbfile.add_acquisition(encoder_signal)
        nwbfile.add_acquisition(encoder_max)

        return nwbfile
Esempio n. 15
0
def add_image_to_nwb(nwbfile: NWBFile, image_data: Image, image_name: str):
    """
    Adds image given by image_data with name image_name to nwbfile

    Parameters
    ----------
    nwbfile
        nwbfile to add image to
    image_data
        The image data
    image_name
        Image name

    Returns
    -------
    None
    """
    module_name = 'ophys'
    description = '{} image at pixels/cm resolution'.format(image_name)

    data, spacing, unit = image_data

    # Only square-pixel 2D images measured in millimeters are supported.
    assert spacing[0] == spacing[1] and len(spacing) == 2 and unit == 'mm'

    # Reuse the 'ophys' module when it already exists on the file.
    if module_name in nwbfile.processing:
        ophys_mod = nwbfile.processing[module_name]
    else:
        ophys_mod = ProcessingModule(module_name, 'Ophys processing module')
        nwbfile.add_processing_module(ophys_mod)

    # Spacing is per-pixel in mm; dividing by 10 converts it to cm.
    image = GrayscaleImage(image_name,
                           data,
                           resolution=spacing[0] / 10,
                           description=description)

    # All images live in a single shared 'images' container.
    if 'images' in ophys_mod.containers:
        images = ophys_mod['images']
    else:
        images = Images(name='images')
        ophys_mod.add_data_interface(images)
    images.add_image(image)
Esempio n. 16
0
    def to_nwb(self, nwbfile: NWBFile) -> NWBFile:
        """Write lick events to *nwbfile* in a 'licking' processing module.

        Skips writing entirely when no licks were recorded, which is the
        expected situation for passive sessions.
        """
        lick_frames = self.value['frame']
        if len(lick_frames) == 0:
            return nwbfile

        lick_timeseries = TimeSeries(
            name='licks',
            data=lick_frames.values,
            timestamps=self.value['timestamps'].values,
            description=('Timestamps and stimulus presentation '
                         'frame indices for lick events'),
            unit='N/A')

        # Expose the lick series through a dedicated processing module.
        licking_module = ProcessingModule('licking',
                                          'Licking behavior processing module')
        licking_module.add_data_interface(lick_timeseries)
        nwbfile.add_processing_module(licking_module)

        return nwbfile
Esempio n. 17
0
class TestDioManager(unittest.TestCase):
    """Checks that DioInjector places BehavioralEvents into the named module."""

    def setUp(self):
        # Minimal NWB file plus one named processing module to inject into.
        self.nwb_content = NWBFile(
            session_description='session_description',
            experimenter='experimenter_name',
            lab='lab',
            institution='institution',
            session_start_time=start_time,
            identifier='identifier',
            experiment_description='experiment_description')

        self.nwb_content.add_processing_module(
            ProcessingModule(name='test_processing_module_name',
                             description='test_description'))
        self.behavioral_events = BehavioralEvents(
            name='test_BehavioralEvents_name')

        self.dio_injector = DioInjector(self.nwb_content)

    def test_dio_injector(self):
        self.dio_injector.inject(
            self.behavioral_events,
            processing_module_name='test_processing_module_name')

        injected = self.nwb_content.processing[
            'test_processing_module_name']['test_BehavioralEvents_name']
        self.assertEqual(self.behavioral_events, injected)
Esempio n. 18
0
# the sorting could then be stored in the top-level :py:class:`~pynwb.misc.Units` table (see below).
# Derived preprocessed data should go in a processing module, which you can create using
# :py:meth:`~pynwb.file.NWBFile.create_processing_module`:

behavior_module = nwbfile.create_processing_module(
    name='behavior', description='preprocessed behavioral data')

####################
# or by directly calling the constructor and adding to the :py:class:`~pynwb.file.NWBFile` using
# :py:meth:`~pynwb.file.NWBFile.add_processing_module`:

from pynwb import ProcessingModule

ecephys_module = ProcessingModule(
    name='ecephys', description='preprocessed extracellular electrophysiology')
nwbfile.add_processing_module(ecephys_module)

####################
# Best practice is to use the NWB schema module names as processing module names where appropriate.
# These are: 'behavior', 'ecephys', 'icephys', 'ophys', 'ogen', 'retinotopy', and 'misc'. You may also create
# a processing module with a custom name. Once these processing modules are added, access them with

# NOTE(review): this looks like a sphinx-gallery tutorial script, where the
# bare expression below is evaluated and its repr rendered in the docs —
# confirm before restructuring.
nwbfile.processing

####################
# which returns a `dict`:
# ::
#
#    {'behavior':
#     behavior <class 'pynwb.base.ProcessingModule'>
#     Fields:
Esempio n. 19
0
# :py:class:`~pynwb.ecephys.EventWaveform`,  :py:class:`~pynwb.ecephys.FeatureExtraction`,
# :py:class:`~pynwb.ecephys.Clustering`, :py:class:`~pynwb.ecephys.ClusterWaveform`.
#
# Processing modules can be created using :py:func:`~pynwb.file.NWBFile.create_processing_module`:

# NOTE(review): the three-positional-argument calls below ('PyNWB tutorial'
# as the second argument) look like the legacy pynwb<2.0 'source' parameter,
# which modern pynwb no longer accepts — confirm the pynwb version this
# tutorial targets before running it.
created_mod = nwbfile.create_processing_module('created_mod', 'PyNWB tutorial',
                                               'example module')

####################
# or by directly calling the constructor and adding to the :py:class:`~pynwb.file.NWBFile` using
# :py:func:`~pynwb.file.NWBFile.add_processing_module`:

from pynwb import ProcessingModule

added_mod = ProcessingModule('added_mod', 'PyNWB tutorial', 'example module')
nwbfile.add_processing_module(added_mod)

####################
# You can add data to your processing module using the method
# :py:func:`~pynwb.base.ProcessingModule.add_data_interface`.
# Lets make another :py:class:`~pynwb.base.TimeSeries` and then add it to the
# :py:class:`~pynwb.base.ProcessingModule` we just added.

data = list(range(0, 100, 10))
timestamps = list(range(10))
mod_ts = TimeSeries('ts_for_mod',
                    'PyNWB tutorial',
                    data,
                    'SIunit',
                    timestamps=timestamps)
added_mod.add_data_interface(mod_ts)
Esempio n. 20
0
def nwb_copy_file(old_file, new_file, cp_objs=None, save_to_file=True):
    """
    Copy fields defined in 'obj', from existing NWB file to new NWB file.

    Parameters
    ----------
    old_file : str, path, nwbfile
        String or path to nwb file '/path/to/old_file.nwb'. Alternatively, the
        nwbfile object.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict, optional
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
        Defaults to an empty dict (copy nothing but the required fields).
    save_to_file: Boolean
        If True, saves directly to new_file.nwb. If False, only returns nwb_new.

    Returns:
    --------
    nwb_new : nwbfile object
    """
    # Avoid the mutable-default-argument pitfall: the previous default of
    # cp_objs={} was shared across calls.
    if cp_objs is None:
        cp_objs = {}

    manager = get_manager()

    # Get from nwbfile object in memory or from file
    if isinstance(old_file, NWBFile):
        nwb_old = old_file
        io1 = False
    else:
        io1 = NWBHDF5IO(str(old_file),
                        'r',
                        manager=manager,
                        load_namespaces=True)
        nwb_old = io1.read()

    # Creates new file
    nwb_new = NWBFile(
        session_description=str(nwb_old.session_description),
        identifier=id_generator(),
        session_start_time=nwb_old.session_start_time,
    )
    with NWBHDF5IO(new_file, mode='w', manager=manager,
                   load_namespaces=False) as io2:
        # Institution name ------------------------------------------------
        if 'institution' in cp_objs:
            nwb_new.institution = str(nwb_old.institution)

        # Lab name --------------------------------------------------------
        if 'lab' in cp_objs:
            nwb_new.lab = str(nwb_old.lab)

        # Session id ------------------------------------------------------
        if 'session' in cp_objs:
            nwb_new.session_id = nwb_old.session_id

        # Devices ---------------------------------------------------------
        if 'devices' in cp_objs:
            for aux in list(nwb_old.devices.keys()):
                dev = Device(nwb_old.devices[aux].name)
                nwb_new.add_device(dev)

        # Electrode groups ------------------------------------------------
        if 'electrode_groups' in cp_objs and nwb_old.electrode_groups is not None:
            for aux in list(nwb_old.electrode_groups.keys()):
                nwb_new.create_electrode_group(
                    name=str(nwb_old.electrode_groups[aux].name),
                    description=str(nwb_old.electrode_groups[aux].description),
                    location=str(nwb_old.electrode_groups[aux].location),
                    device=nwb_new.get_device(
                        nwb_old.electrode_groups[aux].device.name))

        # Electrodes ------------------------------------------------------
        if 'electrodes' in cp_objs and nwb_old.electrodes is not None:
            nElec = len(nwb_old.electrodes['x'].data[:])
            for aux in np.arange(nElec):
                nwb_new.add_electrode(
                    x=nwb_old.electrodes['x'][aux],
                    y=nwb_old.electrodes['y'][aux],
                    z=nwb_old.electrodes['z'][aux],
                    imp=nwb_old.electrodes['imp'][aux],
                    location=str(nwb_old.electrodes['location'][aux]),
                    filtering=str(nwb_old.electrodes['filtering'][aux]),
                    group=nwb_new.get_electrode_group(
                        nwb_old.electrodes['group'][aux].name),
                    group_name=str(nwb_old.electrodes['group_name'][aux]))
            # if there are custom variables
            new_vars = list(nwb_old.electrodes.colnames)
            default_vars = [
                'x', 'y', 'z', 'imp', 'location', 'filtering', 'group',
                'group_name'
            ]
            # Guarded removal (plain loop, not a side-effect comprehension):
            # previously this raised ValueError when a default column was
            # absent from the old file.
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                if var == 'label':
                    var_data = [
                        str(elem) for elem in nwb_old.electrodes[var].data[:]
                    ]
                else:
                    var_data = np.array(nwb_old.electrodes[var].data[:])

                nwb_new.add_electrode_column(
                    name=str(var),
                    description=str(nwb_old.electrodes[var].description),
                    data=var_data)

            # If Bipolar scheme for electrodes
            for v in nwb_old.lab_meta_data.values():
                if isinstance(v, EcephysExt) and hasattr(
                        v, 'bipolar_scheme_table'):
                    bst_old = v.bipolar_scheme_table
                    bst_new = BipolarSchemeTable(
                        name=bst_old.name, description=bst_old.description)
                    ecephys_ext = EcephysExt(name=v.name)
                    ecephys_ext.bipolar_scheme_table = bst_new
                    nwb_new.add_lab_meta_data(ecephys_ext)

        # Epochs ----------------------------------------------------------
        if 'epochs' in cp_objs and nwb_old.epochs is not None:
            nEpochs = len(nwb_old.epochs['start_time'].data[:])
            for i in np.arange(nEpochs):
                nwb_new.add_epoch(
                    start_time=nwb_old.epochs['start_time'].data[i],
                    stop_time=nwb_old.epochs['stop_time'].data[i])
            # if there are custom variables
            new_vars = list(nwb_old.epochs.colnames)
            default_vars = ['start_time', 'stop_time', 'tags', 'timeseries']
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                nwb_new.add_epoch_column(
                    name=var,
                    description=nwb_old.epochs[var].description,
                    data=nwb_old.epochs[var].data[:])

        # Invalid times ---------------------------------------------------
        if 'invalid_times' in cp_objs and nwb_old.invalid_times is not None:
            nInvalid = len(nwb_old.invalid_times['start_time'][:])
            for aux in np.arange(nInvalid):
                nwb_new.add_invalid_time_interval(
                    start_time=nwb_old.invalid_times['start_time'][aux],
                    stop_time=nwb_old.invalid_times['stop_time'][aux])

        # Trials ----------------------------------------------------------
        if 'trials' in cp_objs and nwb_old.trials is not None:
            nTrials = len(nwb_old.trials['start_time'])
            for aux in np.arange(nTrials):
                nwb_new.add_trial(start_time=nwb_old.trials['start_time'][aux],
                                  stop_time=nwb_old.trials['stop_time'][aux])
            # if there are custom variables
            new_vars = list(nwb_old.trials.colnames)
            default_vars = ['start_time', 'stop_time']
            for var in default_vars:
                if var in new_vars:
                    new_vars.remove(var)
            for var in new_vars:
                nwb_new.add_trial_column(
                    name=var,
                    description=nwb_old.trials[var].description,
                    data=nwb_old.trials[var].data[:])

        # Intervals -------------------------------------------------------
        if 'intervals' in cp_objs and nwb_old.intervals is not None:
            all_objs_names = list(nwb_old.intervals.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.intervals[obj_name]
                # create and add TimeIntervals
                obj = TimeIntervals(name=obj_old.name,
                                    description=obj_old.description)
                nInt = len(obj_old['start_time'])
                for ind in np.arange(nInt):
                    obj.add_interval(start_time=obj_old['start_time'][ind],
                                     stop_time=obj_old['stop_time'][ind])
                # Add to file
                nwb_new.add_time_intervals(obj)

        # Stimulus --------------------------------------------------------
        if 'stimulus' in cp_objs:
            all_objs_names = list(nwb_old.stimulus.keys())
            for obj_name in all_objs_names:
                obj_old = nwb_old.stimulus[obj_name]
                obj = TimeSeries(name=obj_old.name,
                                 description=obj_old.description,
                                 data=obj_old.data[:],
                                 rate=obj_old.rate,
                                 resolution=obj_old.resolution,
                                 conversion=obj_old.conversion,
                                 starting_time=obj_old.starting_time,
                                 unit=obj_old.unit)
                nwb_new.add_stimulus(obj)

        # Processing modules ----------------------------------------------
        if 'ecephys' in cp_objs:
            interfaces = [
                nwb_old.processing['ecephys'].data_interfaces[key]
                for key in cp_objs['ecephys']
            ]
            # Add ecephys module to NWB file
            ecephys_module = ProcessingModule(
                name='ecephys',
                description='Extracellular electrophysiology data.')
            nwb_new.add_processing_module(ecephys_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    ecephys_module.add_data_interface(obj)

        if 'behavior' in cp_objs:
            interfaces = [
                nwb_old.processing['behavior'].data_interfaces[key]
                for key in cp_objs['behavior']
            ]
            if 'behavior' not in nwb_new.processing:
                # Add behavior module to NWB file
                behavior_module = ProcessingModule(
                    name='behavior', description='behavioral data.')
                nwb_new.add_processing_module(behavior_module)
            for interface_old in interfaces:
                obj = copy_obj(interface_old, nwb_old, nwb_new)
                if obj is not None:
                    behavior_module.add_data_interface(obj)

        # Acquisition -----------------------------------------------------
        # Can get raw ElecetricalSeries and Mic recording
        if 'acquisition' in cp_objs:
            for acq_name in cp_objs['acquisition']:
                obj_old = nwb_old.acquisition[acq_name]
                acq = copy_obj(obj_old, nwb_old, nwb_new)
                nwb_new.add_acquisition(acq)

        # Surveys ---------------------------------------------------------
        if 'surveys' in cp_objs and 'behavior' in nwb_old.processing:
            surveys_list = [
                v for v in
                nwb_old.processing['behavior'].data_interfaces.values()
                if v.neurodata_type == 'SurveyTable'
            ]
            if cp_objs['surveys'] and len(surveys_list) > 0:
                if 'behavior' not in nwb_new.processing:
                    # Add behavior module to NWB file
                    behavior_module = ProcessingModule(
                        name='behavior', description='behavioral data.')
                    nwb_new.add_processing_module(behavior_module)
                for obj_old in surveys_list:
                    srv = copy_obj(obj_old, nwb_old, nwb_new)
                    behavior_module.add_data_interface(srv)

        # Subject ---------------------------------------------------------
        if nwb_old.subject is not None:
            if 'subject' in cp_objs:
                # Prefer the richer ECoGSubject (with cortical surfaces); fall
                # back to a plain Subject when the old file lacks surfaces.
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                except Exception:
                    # Deliberate best-effort fallback; narrowed from a bare
                    # 'except:' so KeyboardInterrupt/SystemExit propagate.
                    nwb_new.subject = Subject(**nwb_old.subject.fields)

        # Write new file with copied fields
        if save_to_file:
            io2.write(nwb_new, link_data=False)

    # Close old file and return new nwbfile object
    if io1:
        io1.close()

    return nwb_new
Esempio n. 21
0
def no2nwb(NOData, session_use, subjects):
    """Convert one "New/Old" (NO) recognition-memory session into an NWBFile.

    Builds an in-memory NWB file containing the session's event markers,
    the stimulus images shown during the learning and recognition phases,
    a trial table for both phases, and per-channel spike-sorting results
    (clusterings, mean waveforms, isolation distance and SNR).

    Parameters
    ----------
    NOData :
        Project data-access object; provides ``sessions``, event data,
        cell listings and the ``markers`` code table.
    session_use :
        Session identifier; key into ``NOData.sessions`` and the value
        matched against the ``session_id`` column of ``subjects``.
    subjects :
        pandas DataFrame of subject metadata (age, sex, diagnosis, ...).

    Returns
    -------
    NWBFile
        The populated, in-memory NWB file (not written to disk here).
    """

    # Prepare the NO data that will be converted to the NWB format

    session = NOData.sessions[session_use]
    events = NOData._get_event_data(session_use, experiment_type='All')
    cell_ids = NOData.ls_cells(session_use)
    experiment_id_learn = session['experiment_id_learn']
    experiment_id_recog = session['experiment_id_recog']
    task_descr = session['task_descr']

    # Get the metadata for the subject
    df_session = subjects[subjects['session_id'] == session_use]

    # Debug output of the subject row being used.
    print('session_use')
    print(session_use)
    print('age')
    print(str(df_session['age'].values[0]))
    print('epilepsy_diagnosis')
    print(str(df_session['epilepsy_diagnosis'].values[0]))

    nwb_subject = Subject(
        age=str(df_session['age'].values[0]),
        description=df_session['epilepsy_diagnosis'].values[0],
        sex=df_session['sex'].values[0],
        subject_id=df_session['subject_id'].values[0])

    # Create the NWB file
    nwbfile = NWBFile(
        #source='https://datadryad.org/bitstream/handle/10255/dryad.163179/RecogMemory_MTL_release_v2.zip',
        session_description='RecogMemory dataset session use 5' + session['session'],
        identifier=session['session_id'],
        session_start_time=datetime.datetime.now(),# TODO: need to check out the time for session start
        file_create_date=datetime.datetime.now(),
        experiment_description="learning: " + str(experiment_id_learn) + ", " + \
                               "recognition: " + \
                               str(experiment_id_recog),
        subject=nwb_subject
    )

    # Add event and experiment_id acquisition
    # event_ts = TimeSeries(name='events', source='NA', unit='NA', data=np.asarray(events[1].values),
    #                       timestamps=np.asarray(events[0].values))

    # events columns: 0 = timestamps, 1 = event marker codes, 2 = experiment id.
    event_ts = TimeSeries(name='events',
                          unit='NA',
                          data=np.asarray(events[1].values),
                          timestamps=np.asarray(events[0].values))
    # experiment_ids = TimeSeries(name='experiment_ids', source='NA', unit='NA', data=np.asarray(events[2]),
    #                             timestamps=np.asarray(events[0].values))
    experiment_ids = TimeSeries(name='experiment_ids',
                                unit='NA',
                                data=np.asarray(events[2]),
                                timestamps=np.asarray(events[0].values))
    nwbfile.add_acquisition(event_ts)
    nwbfile.add_acquisition(experiment_ids)

    # Add stimuli to the NWB file2
    # Get the first cell from the cell list
    # Trials carry the stimulus file paths, identical across cells, so one
    # cell is enough to recover them.
    cell = NOData.pop_cell(session_use, NOData.ls_cells(session_use)[0])
    trials = cell.trials
    stimuli_recog_path = [trial.file_path_recog for trial in trials]
    stimuli_learn_path = [trial.file_path_learn for trial in trials]

    # Add stimuli recog
    # Stored paths use Windows-style separators; rebuild a local POSIX path.
    counter = 1
    for path in stimuli_recog_path:
        folders = path.split('\\')
        path = os.path.join('./RecogMemory_MTL_release_v2', 'Stimuli',
                            folders[0], folders[1], folders[2])
        img = cv2.imread(path)
        name = 'stimuli_recog_' + str(counter)
        stimulus_recog = ImageSeries(name=name,
                                     data=img,
                                     unit='NA',
                                     format='',
                                     timestamps=[0.0])

        nwbfile.add_stimulus(stimulus_recog)
        counter += 1

    # Add stimuli learn
    counter = 1
    for path in stimuli_learn_path:
        # 'NA' marks trials without a learning-phase stimulus.
        if path == 'NA':
            continue
        folders = path.split('\\')

        path = os.path.join('./RecogMemory_MTL_release_v2', 'Stimuli',
                            folders[0], folders[1], folders[2])
        img = cv2.imread(path)

        name = 'stimuli_learn_' + str(counter)

        stimulus_learn = ImageSeries(name=name,
                                     data=img,
                                     unit='NA',
                                     format='',
                                     timestamps=[0.0])

        nwbfile.add_stimulus(stimulus_learn)

        counter += 1

    # Add epochs and trials: storing start and end times for a stimulus

    # First extract the category ids and names that we need
    # The metadata for each trials will be store in a trial table

    cat_id_recog = [trial.category_recog for trial in trials]
    cat_name_recog = [trial.category_name_recog for trial in trials]
    cat_id_learn = [trial.category_learn for trial in trials]
    cat_name_learn = [trial.category_name_learn for trial in trials]

    # Extract the event timestamps, split by phase (learn/recog) and by
    # marker type (stimulus on/off, end of delay 1/2).
    events_learn_stim_on = events[(events[2] == experiment_id_learn) &
                                  (events[1] == NOData.markers['stimulus_on'])]
    events_learn_stim_off = events[(events[2] == experiment_id_learn) & (
        events[1] == NOData.markers['stimulus_off'])]
    events_learn_delay1_off = events[(events[2] == experiment_id_learn) & (
        events[1] == NOData.markers['delay1_off'])]
    events_learn_delay2_off = events[(events[2] == experiment_id_learn) & (
        events[1] == NOData.markers['delay2_off'])]

    events_recog_stim_on = events[(events[2] == experiment_id_recog) &
                                  (events[1] == NOData.markers['stimulus_on'])]
    events_recog_stim_off = events[(events[2] == experiment_id_recog) & (
        events[1] == NOData.markers['stimulus_off'])]
    events_recog_delay1_off = events[(events[2] == experiment_id_recog) & (
        events[1] == NOData.markers['delay1_off'])]
    events_recog_delay2_off = events[(events[2] == experiment_id_recog) & (
        events[1] == NOData.markers['delay2_off'])]

    # Extract new_old label
    new_old_recog = [trial.new_old_recog for trial in trials]

    # Create the trial tables
    nwbfile.add_trial_column('stim_on', 'the time when the stimulus is shown')
    nwbfile.add_trial_column('stim_off', 'the time when the stimulus is off')
    nwbfile.add_trial_column('delay1_off', 'the time when delay1 is off')
    nwbfile.add_trial_column('delay2_off', 'the time when delay2 is off')
    nwbfile.add_trial_column('stim_phase',
                             'learning/recognition phase during the trial')
    nwbfile.add_trial_column('category_id', 'the category id of the stimulus')
    nwbfile.add_trial_column('category_name',
                             'the category name of the stimulus')
    nwbfile.add_trial_column('external_image_file',
                             'the file path to the stimulus')
    nwbfile.add_trial_column('new_old_labels_recog',
                             'labels for new or old stimulus')

    # Iterate only up to the shortest of the four event streams so that the
    # per-trial indices stay aligned across markers.
    range_recog = np.amin([
        len(events_recog_stim_on),
        len(events_recog_stim_off),
        len(events_recog_delay1_off),
        len(events_recog_delay2_off)
    ])
    range_learn = np.amin([
        len(events_learn_stim_on),
        len(events_learn_stim_off),
        len(events_learn_delay1_off),
        len(events_learn_delay2_off)
    ])

    # Iterate the event list and add information into each epoch and trial table
    for i in range(range_learn):
        # nwbfile.create_epoch(start_time=events_learn_stim_on.iloc[i][0],
        #                      stop_time=events_learn_stim_off.iloc[i][0],
        #                      timeseries=[event_ts, experiment_ids],
        #                      tags='stimulus_learn',
        #                      description='learning phase stimulus')

        # nwbfile.add_trial({'start': events_learn_stim_on.iloc[i][0],
        #                    'end': events_learn_delay2_off.iloc[i][0],
        #                    'stim_on': events_learn_stim_on.iloc[i][0],
        #                    'stim_off': events_learn_stim_off.iloc[i][0],
        #                    'delay1_off': events_learn_delay1_off.iloc[i][0],
        #                    'delay2_off': events_learn_delay2_off.iloc[i][0],
        #                    'stim_phase': 'learn',
        #                    'category_id': cat_id_learn[i],
        #                    'category_name': cat_name_learn[i],
        #                    'external_image_file': stimuli_learn_path[i],
        #                    'new_old_labels_recog': -1})

        # Learning-phase trials have no new/old label ('NA').
        nwbfile.add_trial(start_time=events_learn_stim_on.iloc[i][0],
                          stop_time=events_learn_delay2_off.iloc[i][0],
                          stim_on=events_learn_stim_on.iloc[i][0],
                          stim_off=events_learn_stim_off.iloc[i][0],
                          delay1_off=events_learn_delay1_off.iloc[i][0],
                          delay2_off=events_learn_delay2_off.iloc[i][0],
                          stim_phase='learn',
                          category_id=cat_id_learn[i],
                          category_name=cat_name_learn[i],
                          external_image_file=stimuli_learn_path[i],
                          new_old_labels_recog='NA')

    for i in range(range_recog):
        # nwbfile.create_epoch(start_time=events_recog_stim_on.iloc[i][0],
        #                      stop_time=events_recog_stim_off.iloc[i][0],
        #                      timeseries=[event_ts, experiment_ids],
        #                      tags='stimulus_recog',
        #                      description='recognition phase stimulus')

        nwbfile.add_trial(start_time=events_recog_stim_on.iloc[i][0],
                          stop_time=events_recog_delay2_off.iloc[i][0],
                          stim_on=events_recog_stim_on.iloc[i][0],
                          stim_off=events_recog_stim_off.iloc[i][0],
                          delay1_off=events_recog_delay1_off.iloc[i][0],
                          delay2_off=events_recog_delay2_off.iloc[i][0],
                          stim_phase='recog',
                          category_id=cat_id_recog[i],
                          category_name=cat_name_recog[i],
                          external_image_file=stimuli_recog_path[i],
                          new_old_labels_recog=new_old_recog[i])

    # Add the waveform clustering and the spike data.
    # Create necessary processing modules for different kinds of waveform data
    clustering_processing_module = ProcessingModule(
        'Spikes', 'The spike data contained')
    clusterWaveform_learn_processing_module = ProcessingModule(
        'MeanWaveforms_learn',
        'The mean waveforms for the clustered raw signal for learning phase')
    clusterWaveform_recog_processing_module = ProcessingModule(
        'MeanWaveforms_recog',
        'The mean waveforms for the clustered raw signal for recognition phase'
    )
    IsolDist_processing_module = ProcessingModule('IsoDist', 'The IsolDist')
    SNR_processing_module = ProcessingModule('SNR', 'SNR (signal-to-noise)')
    # Get the unique channel id that we will be iterate over
    channel_ids = np.unique([cell_id[0] for cell_id in cell_ids])

    # Iterate the channel list; each channel has one sorted-cells .mat file.
    for channel_id in channel_ids:
        cell_name = 'A' + str(channel_id) + '_cells.mat'
        file_path = os.path.join('RecogMemory_MTL_release_v2', 'Data',
                                 'sorted', session['session'], task_descr,
                                 cell_name)
        try:
            cell_mat = loadmat(file_path)
        except FileNotFoundError:
            # Missing channel files are skipped rather than aborting the
            # whole conversion.
            print("File not found")
            continue
        spikes = cell_mat['spikes']
        meanWaveform_recog = cell_mat['meanWaveform_recog']
        meanWaveform_learn = cell_mat['meanWaveform_learn']
        IsolDist_SNR = cell_mat['IsolDist_SNR']

        # Spike rows appear to be (id, cluster id, timestamp); the /1000000
        # suggests timestamps are stored in microseconds — TODO confirm
        # against the dataset release notes.
        spike_id = np.asarray([spike[0] for spike in spikes])
        spike_cluster_id = np.asarray([spike[1] for spike in spikes])
        spike_timestamps = np.asarray([spike[2] / 1000000 for spike in spikes])
        clustering = Clustering(description='Spikes of the channel detected',
                                num=spike_id,
                                peak_over_rms=np.asarray([0]),
                                times=spike_timestamps,
                                name='channel' + str(channel_id))
        clustering_processing_module.add_data_interface(clustering)

        # Adding mean waveform learning into the processing module; one
        # ClusterWaveforms interface per cluster id found in the .mat struct.
        for i in range(len(meanWaveform_learn[0][0][0][0])):
            waveform_mean_learn = ClusterWaveforms(
                clustering_interface=clustering,
                waveform_filtering='NA',
                waveform_sd=np.asarray([[0]]),
                waveform_mean=np.asarray([meanWaveform_learn[0][0][1][i]]),
                name='waveform_learn_cluster_id_' +
                str(meanWaveform_learn[0][0][0][0][i]))
            try:
                clusterWaveform_learn_processing_module.add_data_interface(
                    waveform_mean_learn)
            except ValueError as e:
                # Duplicate names raise ValueError; skip and keep going.
                print(
                    'Catch an error in adding waveform interface to the recog processing module:'
                    + str(e))
                continue

        # Adding mean waveform recognition into the processing module
        for i in range(len(meanWaveform_recog[0][0][0][0])):
            waveform_mean_recog = ClusterWaveforms(
                clustering_interface=clustering,
                waveform_filtering='NA',
                waveform_sd=np.asarray([[0]]),
                waveform_mean=np.asarray([meanWaveform_recog[0][0][1][i]]),
                name='waveform_recog_cluster_id_' +
                str(meanWaveform_recog[0][0][0][0][i]))
            try:
                clusterWaveform_recog_processing_module.add_data_interface(
                    waveform_mean_recog)
            except ValueError as e:
                print(
                    'Catch an error in adding waveform interface to the recog processing module:'
                    + str(e))
                continue

        # Adding IsolDist_SNR data into the processing module
        # Here I use feature extraction to store the IsolDist_SNR data because
        # they are extracted from the original signals.
        # print(IsolDist_SNR[0][0][0])
        for i in range(len(IsolDist_SNR[0][0][1][0])):
            isoldist_data_interface = TimeSeries(
                data=[IsolDist_SNR[0][0][1][0][i]],
                unit='NA',
                timestamps=[0],
                name='IsolDist_' + str(IsolDist_SNR[0][0][0][0][i]))
            try:
                IsolDist_processing_module.add_data_interface(
                    isoldist_data_interface)
            except ValueError as e:
                print(
                    'Catch an error in adding IsolDist to the processing module:'
                    + str(e))
                continue

            SNR_data_interface = TimeSeries(unit='NA',
                                            description='The SNR data',
                                            data=[IsolDist_SNR[0][0][2][0][i]],
                                            timestamps=[0],
                                            name='SNR_' +
                                            str(IsolDist_SNR[0][0][0][0][i]))

            try:
                SNR_processing_module.add_data_interface(SNR_data_interface)
            except ValueError as e:
                print(
                    'Catch an error in adding SNR to the processing module:' +
                    str(e))
                continue

    # Attach all per-channel processing modules to the file.
    nwbfile.add_processing_module(clustering_processing_module)
    nwbfile.add_processing_module(clusterWaveform_learn_processing_module)
    nwbfile.add_processing_module(clusterWaveform_recog_processing_module)
    nwbfile.add_processing_module(IsolDist_processing_module)
    nwbfile.add_processing_module(SNR_processing_module)

    return nwbfile
Esempio n. 22
0
def add_cell_specimen_table(nwbfile: NWBFile,
                            cell_specimen_table: pd.DataFrame):
    """
    Write the ROIs contained in the cell specimen table into a new NWB
    imaging plane built from the previously supplied metadata.

    Parameters
    ----------
    nwbfile: NWBFile
        In-memory NWBFile currently being written; ROI data is added to it.
    cell_specimen_table: pd.DataFrame
        Cells segmented from an ophys experiment, stored in a json file and
        loaded.
        example: /home/nicholasc/projects/allensdk/allensdk/test/
                 brain_observatory/behavior/cell_specimen_table_789359614.json

    Returns
    -------
    nwbfile: NWBFile
        The altered in-memory NWBFile object that now has a specimen table
    """
    cell_roi_table = cell_specimen_table.reset_index().set_index('cell_roi_id')

    # All experiment metadata lives on one lab_meta_data extension object.
    metadata = nwbfile.lab_meta_data['metadata']

    # Device (created on the file, then retrieved back as the live object):
    device_name = metadata.rig_name
    nwbfile.create_device(device_name,
                          "Allen Brain Observatory")
    device = nwbfile.get_device(device_name)

    # Location:
    location_description = "Area: {}, Depth: {} um".format(
        metadata.targeted_structure,
        metadata.imaging_depth)

    # Field of view:
    fov_width = metadata.field_of_view_width
    fov_height = metadata.field_of_view_height
    imaging_plane_description = "{} field of view in {} at depth {} um".format(
        (fov_width, fov_height),
        metadata.targeted_structure,
        metadata.imaging_depth)

    # Optical channel:
    optical_channel = OpticalChannel(
        name='channel_1',
        description='2P Optical Channel',
        emission_lambda=metadata.emission_lambda)

    # Imaging plane:
    imaging_plane = nwbfile.create_imaging_plane(
        name='imaging_plane_1',
        optical_channel=optical_channel,
        description=imaging_plane_description,
        device=device,
        excitation_lambda=metadata.excitation_lambda,
        imaging_rate=metadata.ophys_frame_rate,
        indicator=metadata.indicator,
        location=location_description,
        manifold=[],  # Should this be passed in for future support?
        conversion=1.0,
        unit='unknown',  # Should this be passed in for future support?
        reference_frame='unknown')  # Should this be passed in for future support?

    # Image segmentation container, stored under the 'two_photon_imaging'
    # processing module (created lazily on first use).
    image_segmentation = ImageSegmentation(name="image_segmentation")

    if 'two_photon_imaging' in nwbfile.modules:
        two_photon_imaging_module = nwbfile.modules['two_photon_imaging']
    else:
        two_photon_imaging_module = ProcessingModule('two_photon_imaging', '2P processing module')
        nwbfile.add_processing_module(two_photon_imaging_module)

    two_photon_imaging_module.add_data_interface(image_segmentation)

    # Plane segmentation holding one row per ROI:
    plane_segmentation = image_segmentation.create_plane_segmentation(
        name='cell_specimen_table',
        description="Segmented rois",
        imaging_plane=imaging_plane)

    # Declare every extra column; the mask columns ('image_mask',
    # 'pixel_mask', 'voxel_mask') are already defined by
    # nwb.ophys::PlaneSegmentation.
    predefined_columns = ('id', 'mask_matrix', 'image_mask', 'pixel_mask',
                          'voxel_mask')
    for col_name in cell_roi_table.columns:
        if col_name in predefined_columns:
            continue
        # Column name doubles as its description when no better one exists.
        plane_segmentation.add_column(
            col_name,
            CELL_SPECIMEN_COL_DESCRIPTIONS.get(col_name,
                                               "No Description Available"))

    # Add each ROI, expanding its mask onto the full field-of-view plane.
    roi_border = [(fov_width - 1), 0, (fov_height - 1), 0]
    for cell_roi_id, row in cell_roi_table.iterrows():
        mask_array = np.array(row.pop('image_mask'))
        full_frame_mask = roi.create_roi_mask(fov_width, fov_height,
                                              roi_border,
                                              roi_mask=mask_array).get_mask_plane()
        specimen_id = row.pop('cell_specimen_id')
        # A missing specimen id is encoded as -1 in the table.
        row['cell_specimen_id'] = -1 if specimen_id is None else specimen_id
        row['id'] = cell_roi_id
        plane_segmentation.add_roi(image_mask=full_frame_mask,
                                   **row.to_dict())

    return nwbfile
Esempio n. 23
0
    def to_nwb(self, nwbfile: NWBFile,
               ophys_timestamps: OphysTimestamps) -> NWBFile:
        """Write the cell specimen table, traces, events and mask image.

        :param nwbfile
            In-memory nwb file object
        :param ophys_timestamps
            ophys timestamps
        """
        # 1. Add cell specimen table, indexed by ROI id.
        roi_table = self.table.reset_index().set_index('cell_roi_id')
        metadata = nwbfile.lab_meta_data['metadata']
        device = nwbfile.get_device()

        # Field of view / imaging-plane description:
        fov = (metadata.field_of_view_width, metadata.field_of_view_height)
        plane_description = "{} field of view in {} at depth {} um".format(
            fov,
            self._meta.imaging_plane.targeted_structure,
            metadata.imaging_depth)

        # Optical channel:
        optical_channel = OpticalChannel(
            name='channel_1',
            description='2P Optical Channel',
            emission_lambda=self._meta.emission_lambda)

        # Imaging plane:
        imaging_plane = nwbfile.create_imaging_plane(
            name='imaging_plane_1',
            optical_channel=optical_channel,
            description=plane_description,
            device=device,
            excitation_lambda=self._meta.imaging_plane.excitation_lambda,
            imaging_rate=self._meta.imaging_plane.ophys_frame_rate,
            indicator=self._meta.imaging_plane.indicator,
            location=self._meta.imaging_plane.targeted_structure)

        # Image segmentation container, stored under the 'ophys' processing
        # module (created lazily on first use).
        segmentation = ImageSegmentation(name="image_segmentation")

        if 'ophys' in nwbfile.processing:
            ophys_module = nwbfile.processing['ophys']
        else:
            ophys_module = ProcessingModule('ophys', 'Ophys processing module')
            nwbfile.add_processing_module(ophys_module)

        ophys_module.add_data_interface(segmentation)

        # Plane segmentation holding one row per ROI:
        plane_segmentation = segmentation.create_plane_segmentation(
            name='cell_specimen_table',
            description="Segmented rois",
            imaging_plane=imaging_plane)

        # Declare every extra column; the mask columns ('roi_mask',
        # 'pixel_mask', 'voxel_mask') are already defined by
        # nwb.ophys::PlaneSegmentation.
        reserved = ('id', 'mask_matrix', 'roi_mask', 'pixel_mask',
                    'voxel_mask')
        for column in roi_table.columns:
            if column in reserved:
                continue
            # Column name doubles as its description when no better one
            # exists in CELL_SPECIMEN_COL_DESCRIPTIONS.
            plane_segmentation.add_column(
                column,
                CELL_SPECIMEN_COL_DESCRIPTIONS.get(
                    column, "No Description Available"))

        # Add each ROI to the plane segmentation.
        # NOTE: The 'roi_mask' in this table has already been processed by
        # allensdk.brain_observatory.behavior.session_apis.data_io
        # .ophys_lims_api get_cell_specimen_table(); the ROI is stored in an
        # array the same shape as the experiment's FULL field of view
        # (e.g. 512 x 512).
        for roi_id, row in roi_table.iterrows():
            mask = row.pop('roi_mask')
            specimen_id = row.pop('cell_specimen_id')
            # A missing specimen id is encoded as -1 in the table.
            row['cell_specimen_id'] = -1 if specimen_id is None else specimen_id
            row['id'] = roi_id
            plane_segmentation.add_roi(image_mask=mask, **row.to_dict())

        # 2. Add DFF traces
        self._dff_traces.to_nwb(nwbfile=nwbfile,
                                ophys_timestamps=ophys_timestamps)

        # 3. Add corrected fluorescence traces
        self._corrected_fluorescence_traces.to_nwb(nwbfile=nwbfile)

        # 4. Add events
        self._events.to_nwb(nwbfile=nwbfile)

        # 5. Add segmentation mask image
        add_image_to_nwb(nwbfile=nwbfile,
                         image_data=self._segmentation_mask_image,
                         image_name='segmentation_mask_image')

        return nwbfile
Esempio n. 24
0
class TestNWBFileReading(TestCase):
    """Round-trip tests for custom NWB extension types.

    Each test builds an NWBFile containing one extension object (device,
    probe, image series, electrode group, ...), writes it to a temporary
    .nwb file on disk, reads it back with NWBHDF5IO, asserts the contents
    survived the round trip, and deletes the file.
    """

    def setUp(self):
        # Fresh, minimal NWBFile for each test; times are arbitrary but
        # timezone-aware as pynwb requires.
        start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal())
        create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
        self.nwb_file_content = NWBFile(
            session_description='demonstrate NWBFile basics',
            identifier='nwb_file',
            session_start_time=start_time,
            file_create_date=create_date)

    def test_read_nwb_header_device_successfully(self):
        # HeaderDevice carries Trodes acquisition-header metadata as strings.
        header_device = HeaderDevice(
            name='header_device',
            headstage_serial='Sample headstage_serial',
            headstage_smart_ref_on='Sample headstage_smart_ref_on',
            realtime_mode='Sample realtime_mode',
            headstage_auto_settle_on='Sample headstage_auto_settle_on',
            timestamp_at_creation='Sample timestamp_at_creation',
            controller_firmware_version='Sample controller_firmware_version',
            controller_serial='Sample controller_serial',
            save_displayed_chan_only='Sample save_displayed_chan_only',
            headstage_firmware_version='Sample headstage_firmware_version',
            qt_version='Sample qt_version',
            compile_date='Sample compile_date',
            compile_time='Sample compile_time',
            file_prefix='Sample file_prefix',
            headstage_gyro_sensor_on='Sample headstage_gyro_sensor_on',
            headstage_mag_sensor_on='Sample headstage_mag_sensor_on',
            trodes_version='Sample trodes_version',
            headstage_accel_sensor_on='Sample headstage_accel_sensor_on',
            commit_head='Sample commit_head',
            system_time_at_creation='Sample system_time_at_creation',
            file_path='Sample file_path',
        )

        self.nwb_file_content.add_device(header_device)
        nwb_file_handler = NWBHDF5IO('header_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        # Read the file back and check one representative field survived.
        self.assertTrue(os.path.exists('header_device.nwb'))
        with pynwb.NWBHDF5IO('header_device.nwb', 'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertEqual(nwb_file.devices['header_device'].commit_head,
                             header_device.commit_head)

        self.delete_nwb('header_device')

    def test_read_nwb_probe_successfully(self):
        # Probe -> Shank -> ShanksElectrode is a nested device hierarchy.
        shanks_electrode = ShanksElectrode(name='electrode_shank',
                                           rel_x=1.0,
                                           rel_y=2.0,
                                           rel_z=3.0)
        shank = Shank(name='shank')
        shank.add_shanks_electrode(shanks_electrode)
        probe = Probe(name='probe',
                      units='mm',
                      id=1,
                      probe_type='type_1',
                      probe_description='2',
                      contact_size=1.0,
                      contact_side_numbering=False)
        probe.add_shank(shank)
        self.nwb_file_content.add_device(probe)

        nwb_file_handler = NWBHDF5IO('probe.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        # load_namespaces is needed so the extension type resolves on read.
        self.assertTrue(os.path.exists('probe.nwb'))
        with pynwb.NWBHDF5IO('probe.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['probe'], probe)

        self.delete_nwb('probe')

    def test_read_nwb_data_acq_device_successfully(self):
        data_acq_device = DataAcqDevice(name='DataAcqDevice1',
                                        system='System1',
                                        amplifier='Amplifier1',
                                        adc_circuit='adc_circuit1')
        self.nwb_file_content.add_device(data_acq_device)

        nwb_file_handler = NWBHDF5IO('data_acq_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('data_acq_device.nwb'))
        with pynwb.NWBHDF5IO('data_acq_device.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['DataAcqDevice1'],
                                      data_acq_device)

        self.delete_nwb('data_acq_device')

    def test_read_nwb_camera_device_successfully(self):
        camera_device = CameraDevice(name='CameraDevice1',
                                     meters_per_pixel=0.20,
                                     camera_name='test name',
                                     model='ndx2000',
                                     lens='500dpt',
                                     manufacturer='sony')
        self.nwb_file_content.add_device(camera_device)

        nwb_file_handler = NWBHDF5IO('camera_device.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('camera_device.nwb'))
        with pynwb.NWBHDF5IO('camera_device.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(nwb_file.devices['CameraDevice1'],
                                      camera_device)

        self.delete_nwb('camera_device')

    def test_read_nwb_nwb_image_series_successfully(self):
        # The same NwbImageSeries is stored twice: as a behavioral time
        # series inside a processing module and as a stimulus template.
        device_1 = Device('device1')
        device_2 = Device('device2')
        mock_timestamps = [1, 2, 3]
        mock_external_file = ['some file']

        nwb_image_series = NwbImageSeries(name='NwbImageSeries1',
                                          timestamps=mock_timestamps,
                                          external_file=mock_external_file,
                                          devices=[device_1, device_2])

        behavioral_time_series = BehavioralEvents(name="BehavioralTimeSeries")
        behavioral_time_series.add_timeseries(nwb_image_series)
        processing_module = ProcessingModule(name='ProcessingModule',
                                             description='')
        processing_module.add_data_interface(behavioral_time_series)
        self.nwb_file_content.add_processing_module(processing_module)

        self.nwb_file_content.add_stimulus_template(nwb_image_series)

        nwb_file_handler = NWBHDF5IO('nwb_image_series.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        # Both storage locations must round-trip to the same contents.
        self.assertTrue(os.path.exists('nwb_image_series.nwb'))
        with pynwb.NWBHDF5IO('nwb_image_series.nwb', 'r',
                             load_namespaces=True) as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertContainerEqual(
                nwb_file.stimulus_template['NwbImageSeries1'],
                nwb_image_series)
            self.assertContainerEqual(
                nwb_file.processing['ProcessingModule'].data_interfaces[
                    'BehavioralTimeSeries'].time_series['NwbImageSeries1'],
                nwb_image_series)

        self.delete_nwb('nwb_image_series')

    def test_read_nwb_nwb_electrode_group_successfully(self):
        # NwbElectrodeGroup extends ElectrodeGroup with targeted coordinates.
        device = Device('device_0')
        self.nwb_file_content.add_device(device)
        nwb_electrode_group = NwbElectrodeGroup(
            name='nwb_electrode_group_0',
            description='Sample description',
            location='Sample location',
            device=device,
            targeted_location='predicted location',
            targeted_x=1.0,
            targeted_y=2.0,
            targeted_z=3.0,
            units='um')

        self.nwb_file_content.add_electrode_group(nwb_electrode_group)
        nwb_file_handler = NWBHDF5IO('nwb_electrode_group.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('nwb_electrode_group.nwb'))
        with pynwb.NWBHDF5IO('nwb_electrode_group.nwb',
                             'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()
            self.assertEqual(
                nwb_file.electrode_groups['nwb_electrode_group_0'].name,
                nwb_electrode_group.name)
            self.assertEqual(
                nwb_file.electrode_groups['nwb_electrode_group_0'].
                targeted_location, nwb_electrode_group.targeted_location)

        self.delete_nwb('nwb_electrode_group')

    def test_read_nwb_associated_files_successfully(self):
        # AssociatedFiles stores raw file content/metadata in a processing
        # module; all fields are plain strings.
        associated_files = AssociatedFiles(
            name='file1',
            description='description of file1',
            content='1 2 3 content of file test',
            task_epochs='1, 2')
        self.nwb_file_content.add_processing_module(
            ProcessingModule('associated_files',
                             'description_of_associaed_files'))
        self.nwb_file_content.processing['associated_files'].add(
            associated_files)

        nwb_file_handler = NWBHDF5IO('associated_files.nwb', mode='w')
        nwb_file_handler.write(self.nwb_file_content)
        nwb_file_handler.close()

        self.assertTrue(os.path.exists('associated_files.nwb'))
        with pynwb.NWBHDF5IO('associated_files.nwb', 'r') as nwb_file_handler:
            nwb_file = nwb_file_handler.read()

            self.assertIsInstance(
                nwb_file.processing['associated_files']['file1'],
                AssociatedFiles)
            self.assertEqual(
                'file1', nwb_file.processing['associated_files']['file1'].name)
            self.assertEqual(
                'description of file1', nwb_file.processing['associated_files']
                ['file1'].fields['description'])
            self.assertEqual(
                '1 2 3 content of file test',
                nwb_file.processing['associated_files']
                ['file1'].fields['content'])
            self.assertEqual(
                '1, 2', nwb_file.processing['associated_files']
                ['file1'].fields['task_epochs'])

        self.delete_nwb('associated_files')

    @staticmethod
    def delete_nwb(filename):
        # Remove the temporary .nwb file produced by a test.
        os.remove(filename + '.nwb')
Esempio n. 25
0
def nwb_copy_file(old_file, new_file, cp_objs=None):
    """
    Copy fields defined in 'cp_objs', from existing NWB file to new NWB file.

    Parameters
    ----------
    old_file : str, path
        String such as '/path/to/old_file.nwb'.
    new_file : str, path
        String such as '/path/to/new_file.nwb'.
    cp_objs : dict, optional
        Name:Value pairs (Group:Children) listing the groups and respective
        children from the current NWB file to be copied. Children can be:
        - Boolean, indicating an attribute (e.g. for institution, lab)
        - List of strings, containing several children names
        Example:
        {'institution':True,
         'lab':True,
         'acquisition':['microphone'],
         'ecephys':['LFP','DecompositionSeries']}
        Defaults to an empty dict (nothing is copied).
    """
    # None as default avoids the shared mutable-default-argument pitfall.
    if cp_objs is None:
        cp_objs = {}

    manager = get_manager()

    # Open original signal file
    with NWBHDF5IO(old_file, 'r', manager=manager,
                   load_namespaces=True) as io1:
        nwb_old = io1.read()

        # Creates new file
        nwb_new = NWBFile(session_description=str(nwb_old.session_description),
                          identifier='',
                          session_start_time=datetime.now(tzlocal()))
        with NWBHDF5IO(new_file, mode='w', manager=manager,
                       load_namespaces=False) as io2:
            # Institution name ------------------------------------------------
            if 'institution' in cp_objs:
                nwb_new.institution = str(nwb_old.institution)

            # Lab name --------------------------------------------------------
            if 'lab' in cp_objs:
                nwb_new.lab = str(nwb_old.lab)

            # Session id ------------------------------------------------------
            if 'session' in cp_objs:
                nwb_new.session_id = nwb_old.session_id

            # Devices ---------------------------------------------------------
            if 'devices' in cp_objs:
                for aux in list(nwb_old.devices.keys()):
                    dev = Device(nwb_old.devices[aux].name)
                    nwb_new.add_device(dev)

            # Electrode groups ------------------------------------------------
            if 'electrode_groups' in cp_objs:
                for aux in list(nwb_old.electrode_groups.keys()):
                    nwb_new.create_electrode_group(
                        name=str(nwb_old.electrode_groups[aux].name),
                        description=str(nwb_old.electrode_groups[
                            aux].description),
                        location=str(nwb_old.electrode_groups[aux].location),
                        device=nwb_new.get_device(
                            nwb_old.electrode_groups[aux].device.name)
                    )

            # Electrodes ------------------------------------------------------
            if 'electrodes' in cp_objs:
                nElec = len(nwb_old.electrodes['x'].data[:])
                for aux in np.arange(nElec):
                    nwb_new.add_electrode(
                        x=nwb_old.electrodes['x'][aux],
                        y=nwb_old.electrodes['y'][aux],
                        z=nwb_old.electrodes['z'][aux],
                        imp=nwb_old.electrodes['imp'][aux],
                        location=str(nwb_old.electrodes['location'][aux]),
                        filtering=str(nwb_old.electrodes['filtering'][aux]),
                        group=nwb_new.get_electrode_group(
                            nwb_old.electrodes['group'][aux].name),
                        group_name=str(nwb_old.electrodes['group_name'][aux])
                    )
                # Copy any custom (non-default) electrode columns. Filtering
                # (instead of list.remove side effects) tolerates files that
                # lack some of the default columns.
                default_vars = ['x', 'y', 'z', 'imp', 'location', 'filtering',
                                'group', 'group_name']
                new_vars = [var for var in nwb_old.electrodes.colnames
                            if var not in default_vars]
                for var in new_vars:
                    if var == 'label':
                        # Labels may be stored as bytes; force them to str.
                        var_data = [str(elem) for elem in nwb_old.electrodes[
                                                          var].data[:]]
                    else:
                        var_data = np.array(nwb_old.electrodes[var].data[:])

                    nwb_new.add_electrode_column(
                        name=str(var),
                        description=str(nwb_old.electrodes[var].description),
                        data=var_data)

            # Epochs ----------------------------------------------------------
            if 'epochs' in cp_objs:
                nEpochs = len(nwb_old.epochs['start_time'].data[:])
                for i in np.arange(nEpochs):
                    nwb_new.add_epoch(
                        start_time=nwb_old.epochs['start_time'].data[i],
                        stop_time=nwb_old.epochs['stop_time'].data[i])
                # Copy any custom (non-default) epoch columns.
                default_vars = ['start_time', 'stop_time', 'tags',
                                'timeseries']
                new_vars = [var for var in nwb_old.epochs.colnames
                            if var not in default_vars]
                for var in new_vars:
                    nwb_new.add_epoch_column(name=var,
                                             description=nwb_old.epochs[
                                                 var].description,
                                             data=nwb_old.epochs[var].data[:])

            # Invalid times ---------------------------------------------------
            if 'invalid_times' in cp_objs:
                nInvalid = len(nwb_old.invalid_times['start_time'][:])
                for aux in np.arange(nInvalid):
                    nwb_new.add_invalid_time_interval(
                        start_time=nwb_old.invalid_times['start_time'][aux],
                        stop_time=nwb_old.invalid_times['stop_time'][aux])

            # Trials ----------------------------------------------------------
            if 'trials' in cp_objs:
                nTrials = len(nwb_old.trials['start_time'])
                for aux in np.arange(nTrials):
                    nwb_new.add_trial(
                        start_time=nwb_old.trials['start_time'][aux],
                        stop_time=nwb_old.trials['stop_time'][aux])
                # Copy any custom (non-default) trial columns.
                default_vars = ['start_time', 'stop_time']
                new_vars = [var for var in nwb_old.trials.colnames
                            if var not in default_vars]
                for var in new_vars:
                    nwb_new.add_trial_column(name=var,
                                             description=nwb_old.trials[
                                                 var].description,
                                             data=nwb_old.trials[var].data[:])

            # Intervals -------------------------------------------------------
            if 'intervals' in cp_objs:
                all_objs_names = list(nwb_old.intervals.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.intervals[obj_name]
                    # create and add TimeIntervals
                    obj = TimeIntervals(name=obj_old.name,
                                        description=obj_old.description)
                    nInt = len(obj_old['start_time'])
                    for ind in np.arange(nInt):
                        obj.add_interval(start_time=obj_old['start_time'][ind],
                                         stop_time=obj_old['stop_time'][ind])
                    # Add to file
                    nwb_new.add_time_intervals(obj)

            # Stimulus --------------------------------------------------------
            if 'stimulus' in cp_objs:
                all_objs_names = list(nwb_old.stimulus.keys())
                for obj_name in all_objs_names:
                    obj_old = nwb_old.stimulus[obj_name]
                    obj = TimeSeries(name=obj_old.name,
                                     description=obj_old.description,
                                     data=obj_old.data[:],
                                     rate=obj_old.rate,
                                     resolution=obj_old.resolution,
                                     conversion=obj_old.conversion,
                                     starting_time=obj_old.starting_time,
                                     unit=obj_old.unit)
                    nwb_new.add_stimulus(obj)

            # Processing modules ----------------------------------------------
            if 'ecephys' in cp_objs:
                if cp_objs['ecephys'] is True:
                    # Copy every interface: iterate the interface objects
                    # (.values()); iterating .keys() would hand copy_obj the
                    # interface *names* instead of the interfaces.
                    interfaces = list(nwb_old.processing[
                        'ecephys'].data_interfaces.values())
                else:  # list of items
                    interfaces = [
                        nwb_old.processing['ecephys'].data_interfaces[key]
                        for key in cp_objs['ecephys']
                    ]
                # Add ecephys module to NWB file
                ecephys_module = ProcessingModule(
                    name='ecephys',
                    description='Extracellular electrophysiology data.'
                )
                nwb_new.add_processing_module(ecephys_module)
                for interface_old in interfaces:
                    obj = copy_obj(interface_old, nwb_old, nwb_new)
                    if obj is not None:
                        ecephys_module.add_data_interface(obj)

            # Acquisition -----------------------------------------------------
            if 'acquisition' in cp_objs:
                if cp_objs['acquisition'] is True:
                    all_acq_names = list(nwb_old.acquisition.keys())
                else:  # list of items
                    all_acq_names = cp_objs['acquisition']
                for acq_name in all_acq_names:
                    obj_old = nwb_old.acquisition[acq_name]
                    obj = copy_obj(obj_old, nwb_old, nwb_new)
                    if obj is not None:
                        nwb_new.add_acquisition(obj)

            # Subject ---------------------------------------------------------
            if 'subject' in cp_objs:
                try:
                    cortical_surfaces = CorticalSurfaces()
                    surfaces = nwb_old.subject.cortical_surfaces.surfaces
                    for sfc in list(surfaces.keys()):
                        cortical_surfaces.create_surface(
                            name=surfaces[sfc].name,
                            faces=surfaces[sfc].faces,
                            vertices=surfaces[sfc].vertices)
                    nwb_new.subject = ECoGSubject(
                        cortical_surfaces=cortical_surfaces,
                        subject_id=nwb_old.subject.subject_id,
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)
                except Exception:
                    # Old file has no (readable) cortical surfaces: fall back
                    # to a plain Subject. Narrowed from a bare 'except:' so
                    # KeyboardInterrupt/SystemExit are not swallowed.
                    nwb_new.subject = Subject(
                        age=nwb_old.subject.age,
                        description=nwb_old.subject.description,
                        genotype=nwb_old.subject.genotype,
                        sex=nwb_old.subject.sex,
                        species=nwb_old.subject.species,
                        subject_id=nwb_old.subject.subject_id,
                        weight=nwb_old.subject.weight,
                        date_of_birth=nwb_old.subject.date_of_birth)

            # Write new file with copied fields
            io2.write(nwb_new, link_data=False)
Esempio n. 26
0
def add_cell_specimen_table(nwbfile: NWBFile,
                            cell_specimen_table: pd.DataFrame,
                            session_metadata: dict):
    """
    Write the segmented ROIs of a cell specimen table into an NWB file.

    A new imaging plane is created from the previously supplied session
    metadata, and every ROI in the table is added to a plane segmentation
    stored under the 'ophys' processing module.

    Parameters
    ----------
    nwbfile: NWBFile
        In-memory NWBFile currently being written, to which ROI data
        is added.
    cell_specimen_table: pd.DataFrame
        Cells segmented from an ophys experiment, as loaded from the
        stored json file.
    session_metadata: dict
        cell_specimen_table related metadata; must include at minimum
        "emission_lambda", "excitation_lambda", "indicator",
        "targeted_structure" and "ophys_frame_rate".

    Returns
    -------
    nwbfile: NWBFile
        The same in-memory NWBFile, now holding the specimen table.
    """
    ophys_metadata = NwbOphysMetadataSchema().load(
        session_metadata, unknown=marshmallow.EXCLUDE)
    roi_table = cell_specimen_table.reset_index().set_index('cell_roi_id')

    lab_metadata = nwbfile.lab_meta_data['metadata']

    # Device: mesoscope rigs are described without a manufacturer field.
    device_name: str = lab_metadata.equipment_name
    if device_name.startswith("MESO"):
        nwbfile.create_device(
            name=device_name,
            description="Allen Brain Observatory - Mesoscope 2P Rig")
    else:
        nwbfile.create_device(
            name=device_name,
            description="Allen Brain Observatory - Scientifica 2P Rig",
            manufacturer="Scientifica")
    device = nwbfile.get_device(device_name)

    # FOV: human-readable description of the imaged field of view.
    fov = (lab_metadata.field_of_view_width,
           lab_metadata.field_of_view_height)
    plane_description = "{} field of view in {} at depth {} um".format(
        fov, ophys_metadata['targeted_structure'],
        lab_metadata.imaging_depth)

    # Optical Channel:
    optical_channel = OpticalChannel(
        name='channel_1',
        description='2P Optical Channel',
        emission_lambda=ophys_metadata['emission_lambda'])

    # Imaging Plane:
    imaging_plane = nwbfile.create_imaging_plane(
        name='imaging_plane_1',
        optical_channel=optical_channel,
        description=plane_description,
        device=device,
        excitation_lambda=ophys_metadata['excitation_lambda'],
        imaging_rate=ophys_metadata['ophys_frame_rate'],
        indicator=ophys_metadata['indicator'],
        location=ophys_metadata['targeted_structure'])

    # Image Segmentation, attached to an (existing or new) ophys module:
    image_segmentation = ImageSegmentation(name="image_segmentation")

    if 'ophys' in nwbfile.processing:
        ophys_module = nwbfile.processing['ophys']
    else:
        ophys_module = ProcessingModule('ophys', 'Ophys processing module')
        nwbfile.add_processing_module(ophys_module)

    ophys_module.add_data_interface(image_segmentation)

    # Plane Segmentation:
    plane_segmentation = image_segmentation.create_plane_segmentation(
        name='cell_specimen_table',
        description="Segmented rois",
        imaging_plane=imaging_plane)

    # 'roi_mask', 'pixel_mask' and 'voxel_mask' are already defined on the
    # nwb.ophys::PlaneSegmentation object, so they (and 'id') must not be
    # declared again as custom columns.
    predefined_columns = {'id', 'mask_matrix', 'roi_mask', 'pixel_mask',
                          'voxel_mask'}
    for column in roi_table.columns:
        if column in predefined_columns:
            continue
        # Column description defaults to a placeholder when no curated
        # description exists for this column name.
        plane_segmentation.add_column(
            column,
            CELL_SPECIMEN_COL_DESCRIPTIONS.get(column,
                                               "No Description Available"))

    # Add every ROI to the plane segmentation object.
    for roi_id, row in roi_table.iterrows():
        # NOTE: The 'roi_mask' in this table has already been processed by
        # allensdk...ophys_lims_api.get_cell_specimen_table(), so each ROI
        # is stored in an array shaped like the FULL field of view of the
        # experiment (e.g. 512 x 512).
        mask = row.pop('roi_mask')

        specimen_id = row.pop('cell_specimen_id')
        row['cell_specimen_id'] = -1 if specimen_id is None else specimen_id
        row['id'] = roi_id
        plane_segmentation.add_roi(image_mask=mask, **row.to_dict())

    return nwbfile
Esempio n. 27
0
def conversion_function(source_paths,
                        f_nwb,
                        metadata,
                        add_raw=False,
                        add_processed=True,
                        add_behavior=True,
                        plot_rois=False):
    """
    Copy data stored in a set of .npz files to a single NWB file.

    Parameters
    ----------
    source_paths : dict
        Dictionary with paths to source files/directories. e.g.:
        {'raw_data': {'type': 'file', 'path': ''},
         'raw_info': {'type': 'file', 'path': ''}
         'processed_data': {'type': 'file', 'path': ''},
         'sparse_matrix': {'type': 'file', 'path': ''},
         'ref_image',: {'type': 'file', 'path': ''}}
    f_nwb : str
        Path to output NWB file, e.g. 'my_file.nwb'.
    metadata : dict
        Metadata dictionary
    add_raw : bool
        Whether to convert raw data or not.
    add_processed : bool
        Whether to convert processed data or not.
    add_behavior : bool
        Whether to convert behavior data or not.
    plot_rois : bool
        Plot ROIs

    Raises
    ------
    ValueError
        If no 'processed_data' path is given: its 'time' vector is always
        needed to derive the imaging rate and the trial times, regardless
        of the add_* flags.
    """

    # Open/load each source file whose path was provided.
    file_raw = None
    file_info = None
    file_processed = None
    file_sparse_matrix = None
    file_reference_image = None
    for k, v in source_paths.items():
        fname = v['path']
        if fname == '':
            continue
        if k == 'raw_data':
            file_raw = h5py.File(fname, 'r')
        elif k == 'raw_info':
            file_info = scipy.io.loadmat(fname,
                                         struct_as_record=False,
                                         squeeze_me=True)
        elif k == 'processed_data':
            file_processed = np.load(fname)
        elif k == 'sparse_matrix':
            file_sparse_matrix = np.load(fname)
        elif k == 'ref_image':
            file_reference_image = np.load(fname)

    # The sampling times of the processed file define the imaging rate and
    # the trial boundaries, so that file is mandatory. Failing early gives a
    # clear message instead of a NameError/TypeError further down.
    if file_processed is None:
        raise ValueError("source_paths['processed_data'] must point to an "
                         "existing file")
    # Compute the time vector and sampling rate once, for all sections below.
    tt = file_processed['time'].ravel()
    fs = 1. / (tt[1] - tt[0])

    # Initialize a NWB object
    nwb = NWBFile(**metadata['NWBFile'])

    # Create and add device
    device = Device(name=metadata['Ophys']['Device'][0]['name'])
    nwb.add_device(device)

    # Creates one Imaging Plane for each channel
    for meta_ip in metadata['Ophys']['ImagingPlane']:
        # Optical channel
        opt_ch = OpticalChannel(
            name=meta_ip['optical_channel'][0]['name'],
            description=meta_ip['optical_channel'][0]['description'],
            emission_lambda=meta_ip['optical_channel'][0]['emission_lambda'])
        nwb.create_imaging_plane(
            name=meta_ip['name'],
            optical_channel=opt_ch,
            description=meta_ip['description'],
            device=device,
            excitation_lambda=meta_ip['excitation_lambda'],
            imaging_rate=fs,
            indicator=meta_ip['indicator'],
            location=meta_ip['location'],
        )

    # Raw optical data
    if add_raw:
        print('Adding raw data...')
        for meta_tps in metadata['Ophys']['TwoPhotonSeries']:
            # Series names ending in 'R' come from the 'R' dataset, all
            # others from 'Y'.
            if meta_tps['name'][-1] == 'R':
                raw_data = file_raw['R']
            else:
                raw_data = file_raw['Y']

            def data_gen(data):
                # Yield one (X, Y, Z) volume per time step, so the whole
                # 4D dataset never has to live in memory at once.
                xl, yl, zl, tl = data.shape
                chunk = 0
                while chunk < tl:
                    val = data[:, :, :, chunk]
                    chunk += 1
                    print('adding data chunk: ', chunk)
                    yield val

            # Change dimensions from (X,Y,Z,T) in the mat file to the
            # (T,X,Y,Z) nwb standard via the iterator's maxshape.
            xl, yl, zl, tl = raw_data.shape
            tps_data = DataChunkIterator(data=data_gen(data=raw_data),
                                         iter_axis=0,
                                         maxshape=(tl, xl, yl, zl))

            tps = TwoPhotonSeries(
                name=meta_tps['name'],
                imaging_plane=nwb.imaging_planes[meta_tps['imaging_plane']],
                data=tps_data,
                rate=file_info['info'].daq.scanRate)
            nwb.add_acquisition(tps)

    # Processed data
    if add_processed:
        print('Adding processed data...')
        ophys_module = ProcessingModule(
            name='Ophys',
            description='contains optical physiology processed data.',
        )
        nwb.add_processing_module(ophys_module)

        # Create Image Segmentation compartment
        img_seg = ImageSegmentation(
            name=metadata['Ophys']['ImageSegmentation']['name'])
        ophys_module.add(img_seg)

        # Create plane segmentation and add ROIs
        meta_ps = metadata['Ophys']['ImageSegmentation'][
            'plane_segmentations'][0]
        ps = img_seg.create_plane_segmentation(
            name=meta_ps['name'],
            description=meta_ps['description'],
            imaging_plane=nwb.imaging_planes[meta_ps['imaging_plane']],
        )

        # Add ROIs: each CSC column [indptr[i], indptr[i+1]) holds the voxel
        # indices of one ROI.
        indices = file_sparse_matrix['indices']
        indptr = file_sparse_matrix['indptr']
        dims = np.squeeze(file_processed['dims'])
        for start, stop in zip(indptr, indptr[1:]):
            voxel_mask = make_voxel_mask(indices[start:stop], dims)
            ps.add_roi(voxel_mask=voxel_mask)

        # Visualize 3D voxel masks
        if plot_rois:
            plot_rois_function(plane_segmentation=ps, indptr=indptr)

        # DFF measures
        dff = DfOverF(name=metadata['Ophys']['DfOverF']['name'])
        ophys_module.add(dff)

        # create ROI regions
        n_cells = file_processed['dFF'].shape[0]
        roi_region = ps.create_roi_table_region(description='RoiTableRegion',
                                                region=list(range(n_cells)))

        # create ROI response series
        # NOTE(review): this mutates the caller's metadata dict in place
        # (adds 'data'/'rois'/'timestamps' keys), as the original did.
        dff_data = file_processed['dFF']
        meta_rrs = metadata['Ophys']['DfOverF']['roi_response_series'][0]
        meta_rrs['data'] = dff_data.T
        meta_rrs['rois'] = roi_region
        meta_rrs['timestamps'] = tt
        dff.create_roi_response_series(**meta_rrs)

        # Creates GrayscaleVolume containers and add a reference image
        grayscale_volume = GrayscaleVolume(
            name=metadata['Ophys']['GrayscaleVolume']['name'],
            data=file_reference_image['im'])
        ophys_module.add(grayscale_volume)

    # Behavior data
    if add_behavior:
        print('Adding behavior data...')
        # Ball motion
        behavior_mod = nwb.create_processing_module(
            name='Behavior',
            description='holds processed behavior data',
        )
        meta_ts = metadata['Behavior']['TimeSeries'][0]
        meta_ts['data'] = file_processed['ball'].ravel()
        meta_ts['timestamps'] = tt
        behavior_ts = TimeSeries(**meta_ts)
        behavior_mod.add(behavior_ts)

        # Re-arranges spatial data of body-points positions tracking
        pos = file_processed['dlc']
        n_points = 8
        pos_reshaped = pos.reshape(
            (-1, n_points, 3))  # dims=(nSamples,n_points,3)

        # Creates a Position object and add one SpatialSeries for each
        # body-point position
        position = Position()
        for i in range(n_points):
            position.create_spatial_series(
                name='SpatialSeries_' + str(i),
                data=pos_reshaped[:, i, :],
                timestamps=tt,
                reference_frame=
                'Description defining what the zero-position is.',
                conversion=np.nan)
        behavior_mod.add(position)

    # Trial times: a trial boundary is every change in trialFlag.
    trialFlag = file_processed['trialFlag'].ravel()
    trial_inds = np.hstack(
        (0, np.where(np.diff(trialFlag))[0], trialFlag.shape[0] - 1))
    trial_times = tt[trial_inds]

    for start, stop in zip(trial_times, trial_times[1:]):
        nwb.add_trial(start_time=start, stop_time=stop)

    # Saves to NWB file
    with NWBHDF5IO(f_nwb, mode='w') as io:
        io.write(nwb)
    # The raw HDF5 file is read lazily by the DataChunkIterator during
    # write, so it can only be closed afterwards.
    if file_raw is not None:
        file_raw.close()
    print('NWB file saved with size: ', os.stat(f_nwb).st_size / 1e6, ' mb')
Esempio n. 28
0
    institution='University College London',
    lab='The Carandini & Harris Lab',
    subject=subject,
    experimenter='Nick Steinmetz',
    experiment_description=
    'Large-scale Neuropixels recordings across brain regions '
    'of mice during a head-fixed visual discrimination task. ',
    related_publications='DOI 10.1038/s41586-019-1787-x',
    keywords=[
        'Neural coding', 'Neuropixels', 'mouse', 'brain-wide', 'vision',
        'visual discrimination', 'electrophysiology'
    ],
)

# Processing module that will group the processed behavioral time series.
behavior_module = ProcessingModule('behavior', 'behavior module')
nwb_file.add_processing_module(behavior_module)
################################################################################
# PROCESS DATA


def read_npy_file(filename):
    """
    Load the contents of a .npy file.
    :param filename: path of the .npy file to read
    :return: numpy array with the file's contents
    """
    return np.load(filename)


def get_rate(timestamps):