Example no. 1
def scan_folder_for_NWBfiles(folder, Nmax=1000000, verbose=True):

    if verbose:
        print('inspecting the folder "%s" [...]' % folder)
        t0 = time.time()

    # limit to Nmax files so that FILES, DATES and SUBJECTS keep the same length
    FILES = get_files_with_extension(folder, extension='.nwb', recursive=True)[:Nmax]
    DATES = np.array([f.split(os.path.sep)[-1].split('-')[0] for f in FILES])
    SUBJECTS = []

    for f in FILES:
        try:
            data = Data(f, metadata_only=True)
            SUBJECTS.append(data.metadata['subject_ID'])
        except BaseException as be:
            SUBJECTS.append('N/A')
            if verbose:
                print(be)
                print('\n /!\ Problem with "%s" \n' % f)

    if verbose:
        print(' -> found n=%i datafiles (in %.1fs) ' % (len(FILES),
                                                        (time.time() - t0)))

    return np.array(FILES), np.array(DATES), np.array(SUBJECTS)
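The helper get_files_with_extension used throughout these examples is not shown here; a minimal sketch of what it could look like, built on glob (an assumption inferred from how it is called, not the repository's actual implementation):

import glob
import os

def get_files_with_extension(folder, extension='.nwb', recursive=False):
    # hypothetical re-implementation: list the files below 'folder' ending with 'extension'
    if recursive:
        pattern = os.path.join(folder, '**', '*' + extension)
    else:
        pattern = os.path.join(folder, '*' + extension)
    return glob.glob(pattern, recursive=recursive)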
Example no. 2
def build_Ca_filelist(folder):
    
    CA_FILES = {'Bruker_folder':[], 'Bruker_file':[],
                'date':[], 'protocol':[],'StartTimeString':[],
                'StartTime':[], 'EndTime':[], 'absoluteTime':[]}
    
    for bdf in get_TSeries_folders(folder):
        fn = get_files_with_extension(bdf, extension='.xml')[0]
        try:
            xml = bruker_xml_parser(fn)
            if len(xml['Ch1']['relativeTime'])>0:
                CA_FILES['date'].append(stringdatetime_to_date(xml['date']))
                CA_FILES['Bruker_folder'].append(bdf)
                CA_FILES['Bruker_file'].append(fn)
                CA_FILES['StartTimeString'].append(xml['StartTime'])
                start = StartTime_to_day_seconds(xml['StartTime'])
                CA_FILES['StartTime'].append(start+xml['Ch1']['absoluteTime'][0])
                CA_FILES['EndTime'].append(start+xml['Ch1']['absoluteTime'][-1])
                CA_FILES['protocol'].append('')
        except BaseException as e:
            print(e)
            print(100*'-')
            print('Problem with file: "%s"' % fn)
            print(100*'-')

    return CA_FILES
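A minimal sketch of how the returned CA_FILES dictionary can be post-processed, e.g. to order the recordings chronologically (the dummy values below stand in for the output of build_Ca_filelist):

import numpy as np

# dummy values standing in for build_Ca_filelist(folder)
CA_FILES = {'Bruker_folder': ['TSeries-B', 'TSeries-A'],
            'StartTime': [36000.0, 32400.0],
            'EndTime': [36600.0, 33000.0]}

order = np.argsort(CA_FILES['StartTime'])   # chronological ordering of the recordings
for i in order:
    print(CA_FILES['Bruker_folder'][i],
          CA_FILES['StartTime'][i], CA_FILES['EndTime'][i])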
Example no. 3
    def compute_subjects(self):

        FILES = get_files_with_extension(FOLDERS[self.fbox.currentText()],
                                         extension='.nwb',
                                         recursive=True)

        SUBJECTS, DISPLAY_NAMES = [], []
        for fn in FILES:
            infos = self.preload_datafolder(fn)
            SUBJECTS.append(infos['subject'])
            DISPLAY_NAMES.append(infos['display_name'])

        self.SUBJECTS = {}
        for s in np.unique(SUBJECTS):
            cond = (np.array(SUBJECTS) == s)
            self.SUBJECTS[s] = {
                'display_names': np.array(DISPLAY_NAMES)[cond],
                'datafiles': np.array(FILES)[cond]
            }

        print(' -> found n=%i subjects ' % len(self.SUBJECTS.keys()))
        self.sbox.clear()
        self.sbox.addItems([self.subject_default_key]+\
                           list(self.SUBJECTS.keys()))
        self.sbox.setCurrentIndex(0)
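The grouping idiom used above (one boolean mask per unique subject) in isolation, as a small runnable sketch with dummy values:

import numpy as np

SUBJECTS = np.array(['Mouse1', 'Mouse2', 'Mouse1'])
FILES = np.array(['2021_01_01-10-00-00.nwb',
                  '2021_01_02-10-00-00.nwb',
                  '2021_01_03-10-00-00.nwb'])

# one entry per unique subject, holding the files recorded from that subject
groups = {s: FILES[SUBJECTS == s] for s in np.unique(SUBJECTS)}
print(groups['Mouse1'])   # -> the first and third file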
Example no. 4
    def scan_folder(self):

        print('inspecting the folder "%s" [...]' %
              FOLDERS[self.fbox.currentText()])

        FILES = get_files_with_extension(FOLDERS[self.fbox.currentText()],
                                         extension='.nwb',
                                         recursive=True)

        DATES = np.array(
            [f.split(os.path.sep)[-1].split('-')[0] for f in FILES])

        self.FILES_PER_DAY = {}

        for d in np.unique(DATES):
            try:
                self.cal.setDateTextFormat(
                    QtCore.QDate(
                        datetime.date(*[int(dd) for dd in d.split('_')])),
                    self.highlight_format)
                self.FILES_PER_DAY[d] = [os.path.join(FOLDERS[self.fbox.currentText()], f)\
                                         for f in np.array(FILES)[DATES==d]]
            except BaseException as be:
                print(be)
            # except ValueError:
            #     pass

        print(' -> found n=%i datafiles ' % len(FILES))
Example no. 5
    def scan_folder(self):

        print('inspecting the folder "%s" [...]' %
              FOLDERS[self.fbox.currentText()])

        FILES0 = get_files_with_extension(FOLDERS[self.fbox.currentText()],
                                          extension='.nwb',
                                          recursive=True)

        TIMES, DATES, FILES = [], [], []
        for f in FILES0:
            # filename convention: 'YYYY_MM_DD-HH-MM-SS[...].nwb'
            Time = f.split(os.path.sep)[-1].replace('.nwb', '').split('-')
            if len(Time) >= 4:
                # Time[0] is the date string, Time[1:4] the hour/min/sec
                TIMES.append(3600 * int(Time[1]) + 60 * int(Time[2]) +
                             int(Time[3]))
                DATES.append(f.split(os.path.sep)[-1].split('-')[0])
                FILES.append(f)

        TIMES, DATES, FILES = np.array(TIMES), np.array(DATES), np.array(FILES)
        NDATES = np.array([
            datetime.date(*[int(dd) for dd in date.split('_')]).toordinal()
            for date in DATES
        ])
        self.FILES_PER_DAY = {}

        guiparts.reinit_calendar(
            self,
            min_date=tuple(
                int(dd) for dd in DATES[np.argmin(NDATES)].split('_')),
            max_date=tuple(
                int(dd) for dd in DATES[np.argmax(NDATES)].split('_')))
        for d in np.unique(DATES):
            try:
                self.cal.setDateTextFormat(
                    QtCore.QDate(
                        datetime.date(*[int(dd) for dd in d.split('_')])),
                    self.highlight_format)
                day_cond = (DATES == d)
                time_sorted = np.argsort(TIMES[day_cond])
                self.FILES_PER_DAY[d] = [os.path.join(FOLDERS[self.fbox.currentText()], f)\
                                         for f in np.array(FILES)[day_cond][time_sorted]]
            except BaseException as be:
                print(be)
                print('error for date %s' % d)

        print(' -> found n=%i datafiles ' % len(FILES))
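The parsing above assumes filenames of the form 'YYYY_MM_DD-HH-MM-SS[...].nwb' (an inference from the split calls, not documented in this excerpt); the date/time handling in isolation:

import datetime
import os

f = '/data/2020_11_03-14-30-05.nwb'               # dummy path following the inferred convention
name = f.split(os.path.sep)[-1].replace('.nwb', '')
date, Time = name.split('-')[0], name.split('-')[1:]
day = datetime.date(*[int(dd) for dd in date.split('_')])
seconds = 3600 * int(Time[0]) + 60 * int(Time[1]) + int(Time[2])
print(day.toordinal(), seconds)                   # ordinal day (for sorting) and time of day in s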
Example no. 6
    def load_data(self, folder):

        if folder != '':
            self.folder = folder
            xml_file = os.path.join(
                folder,
                get_files_with_extension(self.folder, extension='.xml')[0])
            self.bruker_data = bruker_xml_parser(xml_file)

            self.cframe = 0
            self.nframes = len(
                self.bruker_data[self.channelB.currentText()]['tifFile'])
            self.display_frames()
            self.ROIS = []
            if os.path.isfile(os.path.join(self.folder, 'cells.npy')):
                cells = np.load(os.path.join(self.folder, 'cells.npy'),
                                allow_pickle=True).item()
                for x, y, z in zip(cells['x'], cells['y'], cells['z']):
                    self.ROIS.append(cellROI(depth=z, pos=(x, y), parent=self))
        else:
            print('"%s" is not a valid folder' % folder)
Example no. 7
    def compute_subjects(self):

        FILES = get_files_with_extension(FOLDERS[self.fbox.currentText()],
                                         extension='.nwb',
                                         recursive=True)
        print(
            ' looping over n=%i datafiles to fetch "subjects" metadata [...]' %
            len(FILES))
        DATES = np.array(
            [f.split(os.path.sep)[-1].split('-')[0] for f in FILES])

        SUBJECTS, DISPLAY_NAMES, SDATES, NDATES = [], [], [], []
        for fn, date in zip(FILES, DATES):
            infos = self.preload_datafolder(fn)
            SDATES.append(date)
            SUBJECTS.append(infos['subject'])
            DISPLAY_NAMES.append(infos['display_name'])
            NDATES.append(
                datetime.date(*[int(dd)
                                for dd in date.split('_')]).toordinal())

        self.SUBJECTS = {}
        for s in np.unique(SUBJECTS):
            cond = (np.array(SUBJECTS) == s)
            self.SUBJECTS[s] = {
                'display_names': np.array(DISPLAY_NAMES)[cond],
                'datafiles': np.array(FILES)[cond],
                'dates': np.array(SDATES)[cond],
                'dates_num': np.array(NDATES)[cond]
            }

        print(' -> found n=%i subjects ' % len(self.SUBJECTS.keys()))
        self.sbox.clear()
        self.sbox.addItems([self.subject_default_key]+\
                           list(self.SUBJECTS.keys()))
        self.sbox.setCurrentIndex(0)
Example no. 8
    #     compression_metadata = {'tool':tool,
    #                             'subsampling':subsampling,
    #                             'smoothing':smoothing,
    #                             'extension':extension,
    #                             'Nframe_per_file':Nframe_per_file,
    #                             'max_file':max_file}
    #     np.save(os.path.join(directory, 'compression-metadata.npy'), compression_metadata)


if __name__ == '__main__':

    # fn = '/home/yann/DATA/2020_11_03/TSeries-28102020-231-00-031/'
    folder = '/home/yann/DATA/2020_11_03/'

    for bdf in list_TSeries_folder(folder)[6:]:
        fn = get_files_with_extension(bdf, extension='.xml')[0]
        xml = bruker_xml_parser(fn)
        binaryFile = os.path.join(bdf, 'suite2p', 'plane0', 'data.bin')
        if len(xml['Ch1']['absoluteTime']) > 10 and os.path.isfile(binaryFile):
            compress_FluorescenceMovie(bdf,
                                       xml,
                                       Nframe_per_file=1000,
                                       max_file=1,
                                       verbose=True,
                                       subsampling=0,
                                       smoothing=0)
            os.system('ls -lh %s ' % os.path.join(bdf, 'suite2p', 'plane0'))

    # import argparse

    # parser=argparse.ArgumentParser()
Example no. 9
    def run(self):

        if self.destination_folder == '':
            self.destination_folder = FOLDERS[self.destBox.currentText()]
        if self.source_folder == '':
            self.source_folder = FOLDERS[self.sourceBox.currentText()]

        if '10.0.0.' in self.destination_folder:
            print('writing a bash script to be executed as: "bash temp.sh" ')
            F = open('temp.sh', 'w')
            F.write('echo "Password for %s ? "\n' % self.destination_folder)
            F.write('read passwd\n')
        else:
            print('starting copy [...]')

        if self.typeBox.currentText() == 'NWB':
            ##############################################
            #############      NWB file         ##########
            ##############################################
            FILES = get_files_with_extension(self.source_folder,
                                             extension='.nwb',
                                             recursive=True)
            for f in FILES:
                if '10.0.0.' in self.destination_folder:
                    F.write('sshpass -p $passwd rsync -avhP %s %s \n' %
                            (f, self.destination_folder))
                else:
                    cmd = self.file_copy_command(f, self.destination_folder)
                    print('"%s" launched as a subprocess' % cmd)
                    p = subprocess.Popen(cmd, shell=True)
        elif self.typeBox.currentText() == 'FULL':
            if '10.0.0.' in self.destination_folder:
                F.write('sshpass -p $passwd rsync -avhP %s %s \n' %
                        (self.source_folder, self.destination_folder))
            else:
                print(' copying "%s" [...]' % self.source_folder)
                self.folder_copy_command(self.source_folder,
                                         self.destination_folder)

        elif ('Imaging' in self.typeBox.currentText()):
            ##############################################
            #############      Imaging         ##########
            ##############################################
            if 'TSeries' in str(self.source_folder):
                folders = [self.source_folder]
            else:
                folders = get_TSeries_folders(self.source_folder)
            print('processing: ', folders)

            for f in folders:
                new_folder = os.path.join(self.destination_folder,
                                          'TSeries' + f.split('TSeries')[1])
                if '10.0.0.' in self.destination_folder:
                    F.write('sshpass -p $passwd ssh %s mkdir %s \n' %
                            (self.destination_folder.split(':')[0],
                             new_folder.split(':')[1]))
                    F.write('sshpass -p $passwd ssh %s mkdir %s \n' %
                            (self.destination_folder.split(':')[0],
                             new_folder.split(':')[1] + '/suite2p'))
                else:
                    pathlib.Path(new_folder).mkdir(parents=True, exist_ok=True)
                # XML metadata file
                xml = get_files_with_extension(f,
                                               extension='.xml',
                                               recursive=False)
                if len(xml) > 0:
                    if '10.0.0.' in self.destination_folder:
                        F.write('sshpass -p $passwd rsync -avhP %s %s \n' %
                                (xml[0], new_folder))
                    else:
                        print(' copying "%s" [...]' % xml[0])
                        subprocess.Popen(self.file_copy_command(
                            xml[0], new_folder),
                                         shell=True)
                else:
                    print(' /!\ Problem no "xml" found !! /!\  ')
                # suite2p output files (one sub-folder per plane)
                Fsuite2p = os.path.join(f, 'suite2p')
                iplane = 0
                while os.path.isdir(os.path.join(Fsuite2p,
                                                 'plane%i' % iplane)):
                    npys = get_files_with_extension(os.path.join(
                        Fsuite2p, 'plane%i' % iplane),
                                                    extension='.npy',
                                                    recursive=False)
                    inewfolder = os.path.join(new_folder, 'suite2p',
                                              'plane%i' % iplane)
                    if '10.0.0.' in self.destination_folder:
                        F.write('sshpass -p $passwd ssh %s mkdir %s \n' %
                                (self.destination_folder.split(':')[0],
                                 new_folder.split(':')[1] +
                                 '/suite2p/plane%i' % iplane))
                    else:
                        pathlib.Path(inewfolder).mkdir(parents=True,
                                                       exist_ok=True)
                    for n in npys:
                        if '10.0.0.' in self.destination_folder:
                            F.write('sshpass -p $passwd rsync -avhP %s %s \n' %
                                    (n, inewfolder))
                        else:
                            print(' copying "%s" [...]' % n)
                            subprocess.Popen(self.file_copy_command(
                                n, inewfolder),
                                             shell=True)

                    if ('binary' in self.typeBox.currentText()) or (
                            'full' in self.typeBox.currentText()):
                        if os.path.isfile(
                                os.path.join(Fsuite2p, 'plane%i' % iplane,
                                             'data.bin')):
                            print(' copying "%s" [...]' % os.path.join(
                                Fsuite2p, 'plane%i' % iplane, 'data.bin'))
                            if '10.0.0.' in self.destination_folder:
                                F.write(
                                    'sshpass -p $passwd rsync -avhP %s %s \n' %
                                    (os.path.join(Fsuite2p, 'plane%i' % iplane,
                                                  'data.bin'), inewfolder))
                            else:
                                print(' copying "%s" [...]' % os.path.join(
                                    Fsuite2p, 'plane%i' % iplane, 'data.bin'))
                                subprocess.Popen(self.file_copy_command(
                                    os.path.join(Fsuite2p, 'plane%i' % iplane,
                                                 'data.bin'), inewfolder),
                                                 shell=True)
                        else:
                            print('In: "%s" ' % os.path.join(
                                Fsuite2p, 'plane%i' % iplane))
                            print(
                                ' /!\ Problem no "binary file" found !! /!\  ')

                    iplane += 1

        if '10.0.0.' in self.destination_folder:
            print('bash script "temp.sh" closed !')
            F.close()
        else:
            print('done (but cmd likely still running as subprocesses)')
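The file_copy_command method called above is defined elsewhere in the class; a hypothetical sketch of what it could return, mirroring the rsync flags of the bash-script branch (an assumption, not the actual implementation):

def file_copy_command(source, destination):
    # hypothetical helper: returns the shell command string that the caller
    # passes to subprocess.Popen(..., shell=True)
    return 'rsync -avhP "%s" "%s"' % (source, destination)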
Example no. 10
    def __init__(self,
                 folder,
                 dt=None,
                 times=None,
                 frame_sampling=None,
                 extension='.npy',
                 lazy_loading=True,
                 compression_metadata=None,
                 t0=0):
        """
        Image-series container: frames are initialized from the files found in
        'folder' ('.npy', '.npz', '.avi', '.mp4', '.jpeg' or '.tiff'), together
        with a temporal sampling built from 'times' or from 'dt' and 't0'.
        """

        self.extension = extension
        self.BINARY_IMAGES, self.VIDS, self.PICS = None, None, None
        self.IMAGES = None

        FILES = sorted(get_files_with_extension(folder, extension=extension))

        ### ASSOCIATING A TEMPORAL SAMPLING
        # ---------------------------------------------
        if times is not None:
            self.t = times
        else:
            if dt is None:
                dt = 1
            self.t = np.arange(len(FILES)) * dt + t0

        ### DEALING WITH FRAME SAMPLING
        # ---------------------------------------------
        # you can specify a specific subsampling here !
        if frame_sampling is not None and times is None:
            print(
                '/!\ Need to pass the specific times if you subsample the frames /!\ '
            )
        elif frame_sampling is None:
            frame_sampling = np.arange(len(self.t))

        ### LOADING FRAMES
        # ---------------------------------------------
        if extension == '.npy':
            self.BINARY_IMAGES = [FILES[f] for f in frame_sampling]
        elif extension == '.npz':
            self.VIDS = FILES
        elif extension == '.avi':
            self.VIDS = FILES
        elif extension == '.mp4':
            self.VIDS = FILES
        elif extension == '.jpeg':
            self.PICS = FILES
        elif extension == '.tiff':
            self.IMAGES = []
            for fn in FILES:
                self.IMAGES.append(np.array(Image.open(fn)))
        else:
            print('Extension', extension, ' not recognized !')

        if lazy_loading and self.VIDS is not None:
            # we just build a map between frame indices and (video file, frame-within-file)
            self.index_frame_map = []
            for fn in self.VIDS:
                s = fn.split('imgs-')[1].replace(extension, '').split('-')
                i0, i1 = int(s[0]), int(s[1])
                for i, iframe in enumerate(np.arange(i0, i1 + 1)):
                    if iframe in frame_sampling:
                        self.index_frame_map.append([fn, i])
        elif self.VIDS is not None:
            print('Pre-loading the full set of videos [...]')
            self.IMAGES = []
            if extension == '.npz':
                # we pre-load the videos
                for fn in self.VIDS:
                    s = fn.split('imgs-')[1].replace(extension, '').split('-')
                    i0, i1 = int(s[0]), int(s[1])
                    x = np.load(fn)
                    for i, iframe in enumerate(np.arange(i0, i1 + 1)):
                        if iframe in frame_sampling:
                            self.IMAGES.append(x[i, :, :, 0])
            else:
                # movie files read through scikit-video
                for fn in self.VIDS:
                    s = fn.split('imgs-')[1].replace(extension, '').split('-')
                    i0, i1 = int(s[0]), int(s[1])
                    x = skvideo.io.vread(fn)
                    for i, iframe in enumerate(np.arange(i0, i1 + 1)):
                        if iframe in frame_sampling:
                            self.IMAGES.append(x[i, :, :, 0])
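The lazy-loading map assumes video filenames of the form 'imgs-<first>-<last><extension>' (an inference from the parsing above); the index arithmetic in isolation:

fn = 'imgs-1000-1999.npz'                 # dummy filename following the inferred convention
s = fn.split('imgs-')[1].replace('.npz', '').split('-')
i0, i1 = int(s[0]), int(s[1])             # absolute indices of the first/last frame in this file
print(i0, i1)                             # -> 1000 1999
print(1234 - i0)                          # position of absolute frame 1234 within this file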
Example no. 11
def add_ophys(nwbfile,
              args,
              metadata=None,
              with_raw_CaImaging=True,
              with_processed_CaImaging=True,
              Ca_Imaging_options={
                  'Suite2P-binary-filename': 'data.bin',
                  'plane': 0
              }):

    if metadata is None:
        metadata = ast.literal_eval(nwbfile.session_description)
    try:
        CaFn = get_files_with_extension(
            args.CaImaging_folder, extension='.xml')[0]  # get Tseries metadata
    except BaseException as be:
        print(be)
        print('\n /!\  Problem with the CA-IMAGING data in %s  /!\ ' %
              args.datafolder)
        raise Exception

    xml = bruker_xml_parser(CaFn)  # metadata

    onset = (metadata['STEP_FOR_CA_IMAGING_TRIGGER']['onset']
             if 'STEP_FOR_CA_IMAGING_TRIGGER' in metadata else 0)
    CaImaging_timestamps = onset+xml['Ch1']['relativeTime']+\
        float(xml['settings']['framePeriod'])/2. # in the middle in-between two time stamps

    device = pynwb.ophys.Device('Imaging device with settings: \n %s' %
                                str(xml['settings']))  # TO BE FILLED
    nwbfile.add_device(device)
    optical_channel = pynwb.ophys.OpticalChannel(
        'excitation_channel 1', 'Excitation 1',
        float(xml['settings']['laserWavelength']['Excitation 1']))
    imaging_plane = nwbfile.create_imaging_plane(
        'my_imgpln',
        optical_channel,
        description='Depth=%.1f[um]' %
        float(xml['settings']['positionCurrent']['ZAxis']),
        device=device,
        excitation_lambda=float(
            xml['settings']['laserWavelength']['Excitation 1']),
        imaging_rate=1. / float(xml['settings']['framePeriod']),
        indicator='GCamp',
        location='V1',
        # reference_frame='A frame to refer to',
        grid_spacing=(float(xml['settings']['micronsPerPixel']['YAxis']),
                      float(xml['settings']['micronsPerPixel']['XAxis'])))

    Ca_data = None
    if with_raw_CaImaging:

        if args.verbose:
            print('=> Storing Calcium Imaging data [...]')

        Ca_data = BinaryFile(
            Ly=int(xml['settings']['linesPerFrame']),
            Lx=int(xml['settings']['pixelsPerLine']),
            read_filename=os.path.join(
                args.CaImaging_folder, 'suite2p',
                'plane%i' % Ca_Imaging_options['plane'],
                Ca_Imaging_options['Suite2P-binary-filename']))

        CA_SUBSAMPLING = build_subsampling_from_freq(\
                        subsampled_freq=args.CaImaging_frame_sampling,
                        original_freq=1./float(xml['settings']['framePeriod']),
                        N=Ca_data.shape[0], Nmin=3)

        if args.CaImaging_frame_sampling > 0:
            dI = int(1. / args.CaImaging_frame_sampling /
                     float(xml['settings']['framePeriod']))
        else:
            dI = 1

        def Ca_frame_generator():
            for i in CA_SUBSAMPLING:
                yield Ca_data.data[i:i + dI, :, :].mean(axis=0).astype(
                    np.uint8)

        Ca_dataI = DataChunkIterator(data=Ca_frame_generator(),
                                     maxshape=(None, Ca_data.shape[1],
                                               Ca_data.shape[2]),
                                     dtype=np.dtype(np.uint8))
        if args.compression > 0:
            Ca_dataC = H5DataIO(
                data=Ca_dataI,  # with COMPRESSION
                compression='gzip',
                compression_opts=args.compression)
            image_series = pynwb.ophys.TwoPhotonSeries(
                name='CaImaging-TimeSeries',
                dimension=[2],
                data=Ca_dataC,
                imaging_plane=imaging_plane,
                unit='s',
                timestamps=CaImaging_timestamps[CA_SUBSAMPLING])
        else:
            image_series = pynwb.ophys.TwoPhotonSeries(
                name='CaImaging-TimeSeries',
                dimension=[2],
                data=Ca_dataI,
                # data = Ca_data.data[:].astype(np.uint8),
                imaging_plane=imaging_plane,
                unit='s',
                timestamps=CaImaging_timestamps[CA_SUBSAMPLING])
    else:
        image_series = pynwb.ophys.TwoPhotonSeries(name='CaImaging-TimeSeries',
                                                   dimension=[2],
                                                   data=np.ones((2, 2, 2)),
                                                   imaging_plane=imaging_plane,
                                                   unit='s',
                                                   timestamps=1. *
                                                   np.arange(2))
    nwbfile.add_acquisition(image_series)

    if with_processed_CaImaging and os.path.isdir(
            os.path.join(args.CaImaging_folder, 'suite2p')):
        print('=> Adding the suite2p processing [...]')
        add_ophys_processing_from_suite2p(
            os.path.join(args.CaImaging_folder, 'suite2p'),
            nwbfile,
            CaImaging_timestamps,
            device=device,
            optical_channel=optical_channel,
            imaging_plane=imaging_plane,
            image_series=image_series)  # ADD UPDATE OF starting_time
    elif with_processed_CaImaging:
        print('\n /!\  no "suite2p" folder found in "%s"  /!\ ' %
              args.CaImaging_folder)

    return Ca_data
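The timestamp convention used above places each imaging frame in the middle of its acquisition period, shifted by the trigger onset; a numeric sketch of the same formula with dummy values:

import numpy as np

framePeriod = 0.05                          # dummy frame period (s), as in xml['settings']['framePeriod']
relativeTime = np.arange(4) * framePeriod   # dummy Bruker relative frame times
onset = 2.0                                 # dummy trigger onset from the session metadata

CaImaging_timestamps = onset + relativeTime + framePeriod / 2.
print(CaImaging_timestamps)                 # -> [2.025 2.075 2.125 2.175]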
Example no. 12
def add_ophys(nwbfile,
              args,
              metadata=None,
              with_raw_CaImaging=True,
              with_processed_CaImaging=True,
              Ca_Imaging_options={
                  'Suite2P-binary-filename': 'data.bin',
                  'plane': 0
              }):

    #########################################
    ##########  Loading metadata ############
    #########################################
    if metadata is None:
        metadata = ast.literal_eval(nwbfile.session_description)
    try:
        CaFn = get_files_with_extension(
            args.CaImaging_folder, extension='.xml')[0]  # get Tseries metadata
    except BaseException as be:
        print(be)
        print('\n /!\  Problem with the CA-IMAGING data in %s  /!\ ' %
              args.datafolder)
        raise Exception

    xml = bruker_xml_parser(CaFn)  # metadata

    ##################################################
    ##########  setup-specific quantities ############
    ##################################################
    if 'A1-2P' in metadata['Rig']:
        functional_chan = 'Ch2'  # green channel is channel 2 downstairs
        laser_key = 'Laser'
        Depth = float(xml['settings']['positionCurrent']['ZAxis']['Z Focus']
                      [0])  # center depth only !
    else:
        functional_chan = 'Ch1'
        laser_key = 'Excitation 1'
        Depth = float(xml['settings']['positionCurrent']['ZAxis'])

    ##################################################
    ##########  device & imaging plane   #############
    ##################################################

    device = pynwb.ophys.Device('Imaging device with settings: \n %s' %
                                str(xml['settings']))  # TO BE FILLED
    nwbfile.add_device(device)
    optical_channel = pynwb.ophys.OpticalChannel(
        'excitation_channel 1', laser_key,
        float(xml['settings']['laserWavelength'][laser_key]))

    multiplane = (True if len(np.unique(xml['depth_shift'])) > 1 else False)

    if not multiplane:
        imaging_plane = nwbfile.create_imaging_plane(
            'my_imgpln',
            optical_channel,
            description='Depth=%.1f[um]' %
            (float(metadata['Z-sign-correction-for-rig']) * Depth),
            device=device,
            excitation_lambda=float(
                xml['settings']['laserWavelength'][laser_key]),
            imaging_rate=1. / float(xml['settings']['framePeriod']),
            indicator='GCamp',
            location='V1',  # ADD METADATA HERE
            # reference_frame='A frame to refer to',
            grid_spacing=(float(xml['settings']['micronsPerPixel']['YAxis']),
                          float(xml['settings']['micronsPerPixel']['XAxis'])))
    else:
        # DESCRIBE THE MULTIPLANES HERE !!!!
        imaging_plane = nwbfile.create_imaging_plane(
            'my_imgpln',
            optical_channel,
            description='Depth=%.1f[um]' %
            (float(metadata['Z-sign-correction-for-rig']) * Depth),
            device=device,
            excitation_lambda=float(
                xml['settings']['laserWavelength'][laser_key]),
            imaging_rate=1. / float(xml['settings']['framePeriod']),
            indicator='GCamp',
            location='V1',  # ADD METADATA HERE
            # reference_frame='A frame to refer to',
            grid_spacing=(float(xml['settings']['micronsPerPixel']['YAxis']),
                          float(xml['settings']['micronsPerPixel']['XAxis'])))

    ########################################
    ##### --- DEPRECATED to be fixed ...  ##
    ########################################
    Ca_data = None
    # if with_raw_CaImaging:

    #     if args.verbose:
    #         print('=> Storing Calcium Imaging data [...]')

    #     Ca_data = BinaryFile(Ly=int(xml['settings']['linesPerFrame']),
    #                          Lx=int(xml['settings']['pixelsPerLine']),
    #                          read_filename=os.path.join(args.CaImaging_folder,
    #                                     'suite2p', 'plane%i' % Ca_Imaging_options['plane'],
    #                                                     Ca_Imaging_options['Suite2P-binary-filename']))

    #     CA_SUBSAMPLING = build_subsampling_from_freq(\
    #                     subsampled_freq=args.CaImaging_frame_sampling,
    #                     original_freq=1./float(xml['settings']['framePeriod']),
    #                     N=Ca_data.shape[0], Nmin=3)

    #     if args.CaImaging_frame_sampling>0:
    #         dI = int(1./args.CaImaging_frame_sampling/float(xml['settings']['framePeriod']))
    #     else:
    #         dI = 1

    #     def Ca_frame_generator():
    #         for i in CA_SUBSAMPLING:
    #             yield Ca_data.data[i:i+dI, :, :].mean(axis=0).astype(np.uint8)

    #     Ca_dataI = DataChunkIterator(data=Ca_frame_generator(),
    #                                  maxshape=(None, Ca_data.shape[1], Ca_data.shape[2]),
    #                                  dtype=np.dtype(np.uint8))
    #     if args.compression>0:
    #         Ca_dataC = H5DataIO(data=Ca_dataI, # with COMPRESSION
    #                             compression='gzip',
    #                             compression_opts=args.compression)
    #         image_series = pynwb.ophys.TwoPhotonSeries(name='CaImaging-TimeSeries',
    #                                                    dimension=[2],
    #                                                    data = Ca_dataC,
    #                                                    imaging_plane=imaging_plane,
    #                                                    unit='s',
    #                                                    timestamps = CaImaging_timestamps[CA_SUBSAMPLING])
    #     else:
    #         image_series = pynwb.ophys.TwoPhotonSeries(name='CaImaging-TimeSeries',
    #                                                    dimension=[2],
    #                                                    data = Ca_dataI,
    #                                                    # data = Ca_data.data[:].astype(np.uint8),
    #                                                    imaging_plane=imaging_plane,
    #                                                    unit='s',
    #                                                    timestamps = CaImaging_timestamps[CA_SUBSAMPLING])
    # else:
    #     image_series = pynwb.ophys.TwoPhotonSeries(name='CaImaging-TimeSeries',
    #                                                dimension=[2],
    #                                                data = np.ones((2,2,2)),
    #                                                imaging_plane=imaging_plane,
    #                                                unit='s',
    #                                                timestamps = 1.*np.arange(2))
    # just a dummy version for now
    image_series = pynwb.ophys.TwoPhotonSeries(
        name='CaImaging-TimeSeries\n raw-data-folder=%s' %
        args.CaImaging_folder.replace('/', '_'),
        dimension=[2],
        data=np.ones((2, 2, 2)),
        imaging_plane=imaging_plane,
        unit='s',
        timestamps=1. * np.arange(2))  # TEMPORARY

    nwbfile.add_acquisition(image_series)

    if with_processed_CaImaging and os.path.isdir(
            os.path.join(args.CaImaging_folder, 'suite2p')):
        print('=> Adding the suite2p processing [...]')
        add_ophys_processing_from_suite2p(
            os.path.join(args.CaImaging_folder, 'suite2p'),
            nwbfile,
            xml,
            device=device,
            optical_channel=optical_channel,
            imaging_plane=imaging_plane,
            image_series=image_series)  # ADD UPDATE OF starting_time
    elif with_processed_CaImaging:
        print('\n /!\  no "suite2p" folder found in "%s"  /!\ ' %
              args.CaImaging_folder)

    return Ca_data
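The multiplane test above only checks whether the Bruker 'depth_shift' values take more than one level; in isolation, with dummy values:

import numpy as np

depth_shift = np.array([0., 20., 40., 0., 20., 40.])   # dummy per-frame depth shifts (um)
multiplane = len(np.unique(depth_shift)) > 1            # True when frames cycle through planes
print(multiplane)                                        # -> True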
Example no. 13
        default=['protocols'],
        help='')
    parser.add_argument("--remove_all_pdfs",
                        help="remove all pdfs of previous analysis in folder",
                        action="store_true")
    parser.add_argument('-nmax', "--Nmax", type=int, default=1000000)
    parser.add_argument("-v",
                        "--verbose",
                        help="increase output verbosity",
                        action="store_true")

    args = parser.parse_args()

    if args.remove_all_pdfs and os.path.isdir(args.datafile):
        FILES = get_files_with_extension(args.datafile,
                                         extension='.pdf',
                                         recursive=True)
        for f in FILES:
            print('removing', f)
            os.remove(f)
    elif os.path.isdir(args.datafile):
        FILES = get_files_with_extension(args.datafile,
                                         extension='.nwb',
                                         recursive=True)
        for f in FILES:
            try:
                make_summary_pdf(f,
                                 include=args.ops,
                                 Nmax=args.Nmax,
                                 verbose=args.verbose)
            except BaseException as be: