Example #1
def create_module_from_activity(name, description, activity, frequency):
    module = ProcessingModule(name=name, description=description)
    for electrode in activity.keys():
        module.add(
            pynwb.base.TimeSeries(
                'electrode' + str(electrode),
                activity[electrode],
                unit="spikes/s",
                rate=frequency,
                comments="Each row corresponds to a specific trial"))
    return module
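A minimal usage sketch for the function above. The activity dict, rate, and session details are invented for illustration, and the imports cover both this sketch and the names (pynwb, ProcessingModule) that create_module_from_activity expects to find in scope:

from datetime import datetime

import numpy as np
import pynwb
from dateutil.tz import tzlocal
from pynwb import NWBFile, ProcessingModule

# Hypothetical firing-rate data: electrode id -> (trials x time bins) array
activity = {0: np.random.rand(20, 100), 1: np.random.rand(20, 100)}

module = create_module_from_activity(name='firing_rates',
                                      description='per-electrode firing rates',
                                      activity=activity,
                                      frequency=100.0)  # assumed rate in Hz

nwbfile = NWBFile(session_description='demo',
                  identifier='demo1',
                  session_start_time=datetime.now(tzlocal()))
nwbfile.add_processing_module(module)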
Example #2
    def test_task_creator_create_task_and_write_to_nwb_successfully(self):
        nwb_content = NWBFile(session_description='demonstrate external files',
                              identifier='NWBE1',
                              session_start_time=datetime(2017, 4, 3, 11,
                                                          tzinfo=tzlocal()),
                              file_create_date=datetime(2017, 4, 15, 12,
                                                        tzinfo=tzlocal()))
        processing_module = ProcessingModule('pm', 'none')

        mock_fl_task_0 = Mock(spec=FlTask)
        mock_fl_task_0.name = 'task_0'
        mock_fl_task_0.description = ''
        mock_fl_task_0.columns = [
            VectorData(name='task_name', description='', data=['Sleep']),
            VectorData(name='task_description',
                       description='',
                       data=['The animal sleeps in a small empty box.']),
            VectorData(name='camera_id', description='', data=[[0]]),
            VectorData(name='task_epochs', description='', data=[[1, 3, 5]]),
        ]
        mock_fl_task_1 = Mock(spec=FlTask)
        mock_fl_task_1.name = 'task_1'
        mock_fl_task_1.description = ''
        mock_fl_task_1.columns = [
            VectorData(name='task_name', description='', data=['Stem+Leaf']),
            VectorData(name='task_description',
                       description='',
                       data=['Spatial Bandit']),
            VectorData(name='camera_id', description='', data=[[1, 2]]),
            VectorData(name='task_epochs', description='', data=[[2, 4]]),
        ]

        task_0 = TaskCreator.create(mock_fl_task_0)
        task_1 = TaskCreator.create(mock_fl_task_1)

        processing_module.add(task_0)
        processing_module.add(task_1)
        nwb_content.add_processing_module(processing_module)

        with NWBHDF5IO(path='task.nwb', mode='w') as nwb_file_io:
            nwb_file_io.write(nwb_content)

        with NWBHDF5IO(path='task.nwb', mode='r') as nwb_file_io:
            nwb_content = nwb_file_io.read()
            self.assertContainerEqual(
                nwb_content.processing['pm'].data_interfaces['task_0'], task_0)
            self.assertContainerEqual(
                nwb_content.processing['pm'].data_interfaces['task_1'], task_1)

        os.remove('task.nwb')
Example #3
import logging

from pynwb import ProcessingModule

# Module-level logger used by ProcessingModuleCreator.insert
logger = logging.getLogger(__name__)


class ProcessingModuleCreator:
    def __init__(self, name, description):
        self.processing_module = ProcessingModule(name, description)

    def insert(self, data):
        try:
            self.processing_module.add(data)
        except TypeError as err:
            # log error instead
            logger.error('Inserting data into processing module has failed: ' +
                         str(err))
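A small usage sketch for the wrapper above; the TimeSeries contents are made up, and the last call deliberately passes an invalid object to show the logged-error path:

import numpy as np
from pynwb import TimeSeries

pmc = ProcessingModuleCreator('behavior', 'processed behavior data')

# A hypothetical TimeSeries; any NWB data interface or DynamicTable would work
speed = TimeSeries(name='running_speed',
                   data=np.random.rand(100),
                   unit='cm/s',
                   rate=30.0)
pmc.insert(speed)

# Not an NWB container: ProcessingModule.add raises TypeError, which gets logged
pmc.insert('not a valid container')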
Example #4
def create_module(self):
    module = ProcessingModule(name=self.name, description=self.description)
    for electrode in self.electrodes:
        module.add(
            pynwb.base.TimeSeries(
                'electrode' + str(electrode),
                self.activity[electrode],
                unit="spikes/s",
                rate=self.frequency,
                comments="Each row corresponds to a specific trial"))
    return module
Example #5
def create_module(self):
    parameters = pd.DataFrame(self.parameters)
    processed_trf = pd.DataFrame({
        "filtered_trf": self.filtered_trf,
        "cleaned_trf": self.cleaned_trf,
        "electrodes": self.list_electrodes
    })
    trf_module = ProcessingModule(name=self.name,
                                  description=self.description)
    trf_module.add(
        DynamicTable.from_dataframe(self.trf.reset_index(),
                                    name=self.name))
    trf_module.add(
        DynamicTable.from_dataframe(
            parameters.set_index("electrode").reset_index(),
            name="trf_parameters"))
    trf_module.add(
        DynamicTable.from_dataframe(processed_trf.reset_index(),
                                    name="processed_trf"))
    return trf_module
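For reference, a self-contained sketch of the DynamicTable.from_dataframe pattern used above, assuming DynamicTable comes from hdmf.common (the implementation PyNWB builds on); the column names and values are invented:

import pandas as pd
from hdmf.common import DynamicTable
from pynwb import ProcessingModule

# Hypothetical per-electrode parameter table
parameters = pd.DataFrame({"electrode": [0, 1, 2], "gain": [1.0, 0.8, 1.2]})

trf_module = ProcessingModule(name='trf', description='temporal receptive fields')
trf_module.add(
    DynamicTable.from_dataframe(parameters.reset_index(), name='trf_parameters'))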
Example #6
def conversion_function(source_paths,
                        f_nwb,
                        metadata,
                        add_raw=False,
                        add_processed=True,
                        add_behavior=True,
                        plot_rois=False):
    """
    Copy data stored in a set of .npz files to a single NWB file.

    Parameters
    ----------
    source_paths : dict
        Dictionary with paths to source files/directories. e.g.:
        {'raw_data': {'type': 'file', 'path': ''},
         'raw_info': {'type': 'file', 'path': ''},
         'processed_data': {'type': 'file', 'path': ''},
         'sparse_matrix': {'type': 'file', 'path': ''},
         'ref_image': {'type': 'file', 'path': ''}}
    f_nwb : str
        Path to output NWB file, e.g. 'my_file.nwb'.
    metadata : dict
        Metadata dictionary
    add_raw : bool
        Whether to convert raw data or not.
    add_processed : bool
        Whether to convert processed data or not.
    add_behavior : bool
        Whether to convert behavior data or not.
    plot_rois : bool
        Whether to plot ROIs or not.
    """

    # Source files
    file_raw = None
    file_info = None
    file_processed = None
    file_sparse_matrix = None
    file_reference_image = None
    for k, v in source_paths.items():
        if v['path'] != '':
            fname = v['path']
            if k == 'raw_data':
                file_raw = h5py.File(fname, 'r')
            if k == 'raw_info':
                file_info = scipy.io.loadmat(fname,
                                             struct_as_record=False,
                                             squeeze_me=True)
            if k == 'processed_data':
                file_processed = np.load(fname)
            if k == 'sparse_matrix':
                file_sparse_matrix = np.load(fname)
            if k == 'ref_image':
                file_reference_image = np.load(fname)

    # Initialize an NWB file object
    nwb = NWBFile(**metadata['NWBFile'])

    # Create and add device
    device = Device(name=metadata['Ophys']['Device'][0]['name'])
    nwb.add_device(device)

    # Creates one Imaging Plane for each channel
    fs = 1. / (file_processed['time'][0][1] - file_processed['time'][0][0])
    for meta_ip in metadata['Ophys']['ImagingPlane']:
        # Optical channel
        opt_ch = OpticalChannel(
            name=meta_ip['optical_channel'][0]['name'],
            description=meta_ip['optical_channel'][0]['description'],
            emission_lambda=meta_ip['optical_channel'][0]['emission_lambda'])
        nwb.create_imaging_plane(
            name=meta_ip['name'],
            optical_channel=opt_ch,
            description=meta_ip['description'],
            device=device,
            excitation_lambda=meta_ip['excitation_lambda'],
            imaging_rate=fs,
            indicator=meta_ip['indicator'],
            location=meta_ip['location'],
        )

    # Raw optical data
    if add_raw:
        print('Adding raw data...')
        for meta_tps in metadata['Ophys']['TwoPhotonSeries']:
            if meta_tps['name'][-1] == 'R':
                raw_data = file_raw['R']
            else:
                raw_data = file_raw['Y']

            def data_gen(data):
                xl, yl, zl, tl = data.shape
                chunk = 0
                while chunk < tl:
                    val = data[:, :, :, chunk]
                    chunk += 1
                    print('adding data chunk: ', chunk)
                    yield val

            xl, yl, zl, tl = raw_data.shape
            tps_data = DataChunkIterator(data=data_gen(data=raw_data),
                                         iter_axis=0,
                                         maxshape=(tl, xl, yl, zl))

            # Change dimensions from (X,Y,Z,T) in mat file to (T,X,Y,Z) nwb standard
            #raw_data = np.moveaxis(raw_data, -1, 0)

            tps = TwoPhotonSeries(
                name=meta_tps['name'],
                imaging_plane=nwb.imaging_planes[meta_tps['imaging_plane']],
                data=tps_data,
                rate=file_info['info'].daq.scanRate)
            nwb.add_acquisition(tps)

    # Processed data
    if add_processed:
        print('Adding processed data...')
        ophys_module = ProcessingModule(
            name='Ophys',
            description='contains optical physiology processed data.',
        )
        nwb.add_processing_module(ophys_module)

        # Create Image Segmentation compartment
        img_seg = ImageSegmentation(
            name=metadata['Ophys']['ImageSegmentation']['name'])
        ophys_module.add(img_seg)

        # Create plane segmentation and add ROIs
        meta_ps = metadata['Ophys']['ImageSegmentation'][
            'plane_segmentations'][0]
        ps = img_seg.create_plane_segmentation(
            name=meta_ps['name'],
            description=meta_ps['description'],
            imaging_plane=nwb.imaging_planes[meta_ps['imaging_plane']],
        )

        # Add ROIs
        indices = file_sparse_matrix['indices']
        indptr = file_sparse_matrix['indptr']
        dims = np.squeeze(file_processed['dims'])
        for start, stop in zip(indptr, indptr[1:]):
            voxel_mask = make_voxel_mask(indices[start:stop], dims)
            ps.add_roi(voxel_mask=voxel_mask)

        # Visualize 3D voxel masks
        if plot_rois:
            plot_rois_function(plane_segmentation=ps, indptr=indptr)

        # DFF measures
        dff = DfOverF(name=metadata['Ophys']['DfOverF']['name'])
        ophys_module.add(dff)

        # create ROI regions
        n_cells = file_processed['dFF'].shape[0]
        roi_region = ps.create_roi_table_region(description='RoiTableRegion',
                                                region=list(range(n_cells)))

        # create ROI response series
        dff_data = file_processed['dFF']
        tt = file_processed['time'].ravel()
        meta_rrs = metadata['Ophys']['DfOverF']['roi_response_series'][0]
        meta_rrs['data'] = dff_data.T
        meta_rrs['rois'] = roi_region
        meta_rrs['timestamps'] = tt
        dff.create_roi_response_series(**meta_rrs)

        # Creates a GrayscaleVolume container with the reference image
        grayscale_volume = GrayscaleVolume(
            name=metadata['Ophys']['GrayscaleVolume']['name'],
            data=file_reference_image['im'])
        ophys_module.add(grayscale_volume)

    # Behavior data
    if add_behavior:
        print('Adding behavior data...')
        # Ball motion
        behavior_mod = nwb.create_processing_module(
            name='Behavior',
            description='holds processed behavior data',
        )
        meta_ts = metadata['Behavior']['TimeSeries'][0]
        meta_ts['data'] = file_processed['ball'].ravel()
        tt = file_processed['time'].ravel()
        meta_ts['timestamps'] = tt
        behavior_ts = TimeSeries(**meta_ts)
        behavior_mod.add(behavior_ts)

        # Re-arranges spatial data from body-point position tracking
        pos = file_processed['dlc']
        n_points = 8
        pos_reshaped = pos.reshape(
            (-1, n_points, 3))  # dims=(nSamples,n_points,3)

        # Creates a Position object and adds one SpatialSeries for each body-point position
        position = Position()
        for i in range(n_points):
            position.create_spatial_series(
                name='SpatialSeries_' + str(i),
                data=pos_reshaped[:, i, :],
                timestamps=tt,
                reference_frame='Description defining what the zero-position is.',
                conversion=np.nan)
        behavior_mod.add(position)

    # Trial times
    trialFlag = file_processed['trialFlag'].ravel()
    trial_inds = np.hstack(
        (0, np.where(np.diff(trialFlag))[0], trialFlag.shape[0] - 1))
    trial_times = tt[trial_inds]

    for start, stop in zip(trial_times, trial_times[1:]):
        nwb.add_trial(start_time=start, stop_time=stop)

    # Saves to NWB file
    with NWBHDF5IO(f_nwb, mode='w') as io:
        io.write(nwb)
    print('NWB file saved with size: ', os.stat(f_nwb).st_size / 1e6, ' MB')
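A hedged invocation sketch for the converter above. Every path and metadata value is a placeholder, and a real run also needs the 'Ophys' and 'Behavior' metadata entries the function reads (Device, ImagingPlane, TwoPhotonSeries, ImageSegmentation, DfOverF, GrayscaleVolume, TimeSeries) plus actual source files, so the call itself is left commented out:

from datetime import datetime
from dateutil.tz import tzlocal

# Placeholder paths; real HDF5/.mat/.npz files are required for an actual run
source_paths = {
    'raw_data': {'type': 'file', 'path': ''},
    'raw_info': {'type': 'file', 'path': ''},
    'processed_data': {'type': 'file', 'path': 'processed.npz'},
    'sparse_matrix': {'type': 'file', 'path': 'sparse.npz'},
    'ref_image': {'type': 'file', 'path': 'ref_image.npz'},
}

metadata = {
    'NWBFile': {
        'session_description': 'example conversion',
        'identifier': 'EXAMPLE001',
        'session_start_time': datetime(2019, 1, 1, tzinfo=tzlocal()),
    },
    # 'Ophys': {...} and 'Behavior': {...} would be filled in with the keys
    # the function accesses above.
}

# conversion_function(source_paths, 'my_file.nwb', metadata,
#                     add_raw=False, add_processed=True, add_behavior=True)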