Example #1
File: pupil.py Project: zhoupc/ease
    def grab_timestamps_and_frames(self, key, n_sample_frames=16):

        import cv2

        rel = experiment.Session() * experiment.Scan.EyeVideo() * experiment.Scan.BehaviorFile().proj(
            hdf_file='filename')

        info = (rel & key).fetch1()

        avi_path = lab.Paths().get_local_path("{behavior_path}/{filename}".format(**info))
        # replace number by %d for hdf-file reader

        tmp = info['hdf_file'].split('.')
        if '%d' not in tmp[0]:
            info['hdf_file'] = tmp[0][:-1] + '%d.' + tmp[-1]

        hdf_path = lab.Paths().get_local_path("{behavior_path}/{hdf_file}".format(**info))

        data = read_video_hdf5(hdf_path)
        packet_length = data['analogPacketLen']
        dat_time, _ = ts2sec(data['ts'], packet_length)

        if float(data['version']) == 2.:
            cam_key = 'eyecam_ts'
            eye_time, _ = ts2sec(data[cam_key][0])
        else:
            cam_key = 'cam1ts' if info['rig'] == '2P3' else 'cam2ts'
            eye_time, _ = ts2sec(data[cam_key])

        total_frames = len(eye_time)

        frame_idx = np.floor(np.linspace(0, total_frames - 1, n_sample_frames))

        cap = cv2.VideoCapture(avi_path)
        no_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))

        if total_frames != no_frames:
            warn("{total_frames} timestamps, but {no_frames}  movie frames.".format(total_frames=total_frames,
                                                                                    no_frames=no_frames))
            if total_frames > no_frames and total_frames and no_frames:  # truncate only if both counts are nonzero
                total_frames = no_frames
                eye_time = eye_time[:total_frames]
                frame_idx = np.round(np.linspace(0, total_frames - 1, n_sample_frames)).astype(int)
            else:
                raise PipelineException('Can not reconcile frame count', key)
        frames = []
        for frame_pos in frame_idx:
            cap.set(cv2.CAP_PROP_POS_FRAMES, frame_pos)
            ret, frame = cap.read()

            frames.append(np.asarray(frame, dtype=float)[..., 0])
        frames = np.stack(frames, axis=2)

        return eye_time, frames, total_frames
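Note: the '%d' substitution above turns the on-disk part filename into the printf-style pattern the multi-part HDF reader expects. A minimal sketch of that transformation on a purely hypothetical filename:

hdf_file = 'behavior_file0.h5'  # hypothetical example filename
tmp = hdf_file.split('.')
if '%d' not in tmp[0]:
    # drop the trailing part number and replace it with %d
    hdf_file = tmp[0][:-1] + '%d.' + tmp[-1]
assert hdf_file == 'behavior_file%d.h5'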
Example #2
File: experiment.py Project: zhoupc/ease
    def local_filenames_as_wildcard(self):
        """Returns the local filename for all parts of this scan (ends in *.tif)."""
        scan_path = (Session() & self).fetch1('scan_path')
        local_path = lab.Paths().get_local_path(scan_path)

        scan_name = (self.__class__() & self).fetch1('filename')
        local_filename = os.path.join(local_path, scan_name) + '*.tif'  # all parts

        return local_filename
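Note: the wildcard returned here would typically be expanded with glob to enumerate the scan parts; a minimal sketch under that assumption (the path below is hypothetical):

import glob

local_filename = '/data/scans/example_scan_*.tif'  # hypothetical result of local_filenames_as_wildcard()
scan_parts = sorted(glob.glob(local_filename))  # one entry per .tif part, in order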
Example #3
File: posture.py Project: afcarl/pipeline
    def _make_tuples(self, key):
        # Get behavior filename
        behavior_path = (experiment.Session() & key).fetch1('behavior_path')
        local_path = lab.Paths().get_local_path(behavior_path)
        filename = (experiment.Scan.BehaviorFile() & key).fetch1('filename')
        full_filename = os.path.join(local_path, filename)

        # Read file
        data = h5.read_behavior_file(full_filename)

        # Read counter timestamps and convert to seconds
        timestamps_in_secs = h5.ts2sec(data['posture_ts'][0])
        ts = h5.ts2sec(data['ts'], is_packeted=True)
        # edge case when ts and posture ts start on different sides of the master clock max value 2 ** 32
        if abs(ts[0] - timestamps_in_secs[0]) > 2 ** 31:
            timestamps_in_secs += (2 ** 32 if ts[0] > timestamps_in_secs[0] else -2 ** 32)

        # Fill with NaNs for out-of-range data or mistimed packets (NaNs in ts)
        timestamps_in_secs[timestamps_in_secs < ts[0]] = float('nan')
        timestamps_in_secs[timestamps_in_secs > ts[-1]] = float('nan')
        nan_limits = np.where(np.diff([0, *np.isnan(ts), 0]))[0]
        for start, stop in zip(nan_limits[::2], nan_limits[1::2]):
            lower_ts = float('-inf') if start == 0 else ts[start - 1]
            upper_ts = float('inf') if stop == len(ts) else ts[stop]
            timestamps_in_secs[np.logical_and(timestamps_in_secs > lower_ts,
                                              timestamps_in_secs < upper_ts)] = float('nan')

        # Read video
        filename = (experiment.Scan.PostureVideo() & key).fetch1('filename')
        full_filename = os.path.join(local_path, filename)
        video = cv2.VideoCapture(full_filename)

        # Fix inconsistent num_video_frames vs num_timestamps
        num_video_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
        num_timestamps = len(timestamps_in_secs)
        if num_timestamps != num_video_frames:
            if abs(num_timestamps - num_video_frames) > 1:
                msg = ('Number of movie frames and timestamps differ: {} frames vs {} '
                       'timestamps').format(num_video_frames, num_timestamps)
                raise PipelineException(msg)
            elif num_timestamps > num_video_frames: # cut timestamps to match video frames
                timestamps_in_secs = timestamps_in_secs[:-1]
            else: # fill with NaNs
                timestamps_in_secs[-1] = float('nan')

        # Get 16 sample frames
        frames = []
        for frame_idx in np.round(np.linspace(0, num_video_frames - 1, 16)).astype(int):
            video.set(cv2.CAP_PROP_POS_FRAMES, frame_idx)
            _, frame = video.read()
            frames.append(np.asarray(frame, dtype=float)[..., 0])
        frames = np.stack(frames, axis=-1)

        # Insert
        self.insert1({**key, 'posture_time': timestamps_in_secs,
                      'total_frames': len(timestamps_in_secs), 'preview_frames': frames})
        self.notify(key, frames)
Example #4
def test_paths():
    rel = (experiment.Session() * experiment.Scan.EyeVideo() *
           experiment.Scan.BehaviorFile().proj(hdf_file='filename'))

    path_info = random.choice(rel.fetch.as_dict())

    tmp = path_info['hdf_file'].split('.')
    if '%d' in tmp[0]:
        # new version
        path_info['hdf_file'] = tmp[0][:-2] + '0.' + tmp[-1]
    else:
        path_info['hdf_file'] = tmp[0][:-1] + '0.' + tmp[-1]

    hdf_path = lab.Paths().get_local_path(
        '{behavior_path}/{hdf_file}'.format(**path_info))
    avi_path = lab.Paths().get_local_path(
        '{behavior_path}/{filename}'.format(**path_info))

    assert_true(os.path.isfile(avi_path) and os.path.isfile(hdf_path))
Example #5
    def _make_tuples(self, key):
        # Get behavior filename
        behavior_path = (experiment.Session() & key).fetch1('behavior_path')
        local_path = lab.Paths().get_local_path(behavior_path)
        filename = (experiment.Scan.BehaviorFile() & key).fetch1('filename')
        full_filename = os.path.join(local_path, filename)

        # Read file
        data = h5.read_behavior_file(full_filename)

        # Get counter timestamps and convert to seconds
        timestamps_in_secs = h5.ts2sec(data['wheel'][1])
        ts = h5.ts2sec(data['ts'], is_packeted=True)
        # edge case when ts and wheel ts start on different sides of the master clock max value 2 ** 32
        if abs(ts[0] - timestamps_in_secs[0]) > 2 ** 31:
            timestamps_in_secs += (2 ** 32 if ts[0] > timestamps_in_secs[0] else -2 ** 32)

        # Read wheel position counter and fix wrap around at 2 ** 32
        wheel_position = data['wheel'][0]
        wheel_diffs = np.diff(wheel_position)
        for wrap_idx in np.where(abs(wheel_diffs) > 2 ** 31)[0]:
            wheel_position[wrap_idx + 1:] += (2 ** 32 if wheel_diffs[wrap_idx] < 0 else -2 ** 32)
        wheel_position -= wheel_position[0] # start counts at zero

        # Compute wheel velocity
        num_samples = int(round((timestamps_in_secs[-1] - timestamps_in_secs[0]) * 10)) # every 100 msecs
        sample_times = np.linspace(timestamps_in_secs[0], timestamps_in_secs[-1], num_samples)
        sample_position = np.interp(sample_times, timestamps_in_secs, wheel_position)
        counter_velocity = np.gradient(sample_position) * 10 # counts / sec

        # Transform velocity from counts/sec to cm/sec
        wheel_specs = experiment.TreadmillSpecs() * experiment.Session() & key
        diameter, counts_per_rev = wheel_specs.fetch1('diameter', 'counts_per_revolution')
        wheel_perimeter = np.pi * diameter # 1 rev = xx cms
        velocity = (counter_velocity / counts_per_rev) * wheel_perimeter # cm /sec

        # Resample at initial timestamps
        velocity = np.interp(timestamps_in_secs, sample_times, velocity)

        # Fill with NaNs for out-of-range data or mistimed packets
        velocity[timestamps_in_secs < ts[0]] = float('nan')
        velocity[timestamps_in_secs > ts[-1]] = float('nan')
        nan_limits = np.where(np.diff([0, *np.isnan(ts), 0]))[0]
        for start, stop in zip(nan_limits[::2], nan_limits[1::2]):
            lower_ts = float('-inf') if start == 0 else ts[start - 1]
            upper_ts = float('inf') if stop == len(ts) else ts[stop]
            velocity[np.logical_and(timestamps_in_secs > lower_ts,
                                    timestamps_in_secs < upper_ts)] = float('nan')
        timestamps_in_secs[np.isnan(velocity)] = float('nan')

        # Insert
        self.insert1({**key, 'treadmill_time': timestamps_in_secs,
                      'treadmill_raw': data['wheel'][0], 'treadmill_vel': velocity})
        self.notify(key)
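Note: the wrap-around correction above handles a 32-bit counter overflowing at 2 ** 32. A minimal, self-contained sketch of the same loop on made-up counts:

import numpy as np

wheel_position = np.array([2 ** 32 - 3, 2 ** 32 - 1, 1, 4], dtype=float)  # synthetic counter that wraps
wheel_diffs = np.diff(wheel_position)
for wrap_idx in np.where(abs(wheel_diffs) > 2 ** 31)[0]:
    # a huge negative jump means the counter wrapped forward, so shift everything after it up by 2 ** 32
    wheel_position[wrap_idx + 1:] += (2 ** 32 if wheel_diffs[wrap_idx] < 0 else -2 ** 32)
wheel_position -= wheel_position[0]  # start counts at zero; result: [0, 2, 4, 7]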
Example #6
    def make(self, key):

        print(f'Populating Sync for {key}')

        # Get olfactory h5 path and filename
        olfactory_path = (OdorSession & key).fetch1('odor_path')
        local_path = lab.Paths().get_local_path(olfactory_path)
        filename_base = (OdorRecording & key).fetch1('filename')
        analog_filename = os.path.join(local_path, filename_base + '_%d.h5')

        # Load olfactory data
        analog_data = h5.read_analog_olfaction_file(analog_filename)

        scan_times = h5.ts2sec(analog_data['ts'], is_packeted=True)
        binarized_signal = analog_data['scanImage'] > 2.7  # TTL voltage low/high threshold
        rising_edges = np.where(np.diff(binarized_signal.astype(int)) > 0)[0]
        frame_times = scan_times[rising_edges]

        # Correct NaN gaps in timestamps (mistimed or dropped packets during recording)
        if np.any(np.isnan(frame_times)):
            # Raise exception if first or last frame pulse was recorded in mistimed packet
            if np.isnan(frame_times[0]) or np.isnan(frame_times[-1]):
                msg = ('First or last frame happened during misstamped packets. Pulses '
                       'could have been missed: start/end of scanning is unknown.')
                raise PipelineException(msg)

            # Fill each gap of nan values with correct number of timepoints
            frame_period = np.nanmedian(np.diff(frame_times))  # approx
            nan_limits = np.where(np.diff(np.isnan(frame_times)))[0]
            nan_limits[1::2] += 1  # limits are indices of the last valid point before the nan gap and first after it
            correct_fts = []
            for i, (start, stop) in enumerate(zip(nan_limits[::2], nan_limits[1::2])):
                correct_fts.extend(frame_times[0 if i == 0 else nan_limits[2 * i - 1]: start + 1])
                num_missing_points = int(round((frame_times[stop] - frame_times[start]) /
                                               frame_period - 1))
                correct_fts.extend(np.linspace(frame_times[start], frame_times[stop],
                                               num_missing_points + 2)[1:-1])
            correct_fts.extend(frame_times[nan_limits[-1]:])
            frame_times = np.array(correct_fts)

        # Check that frame times occur at the same period
        frame_intervals = np.diff(frame_times)
        frame_period = np.median(frame_intervals)
        if np.any(abs(frame_intervals - frame_period) > 0.15 * frame_period):
            raise PipelineException('Frame time period is irregular')

        self.insert1({**key, 'signal_start_time': frame_times[0],
                      'signal_duration': frame_times[-1] - frame_times[0],
                      'frame_times': frame_times})

        print(f'ScanImage sync added for animal {key["animal_id"]}, '
              f'olfactory session {key["odor_session"]}, '
              f'recording {key["recording_idx"]}\n')
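Note: the frame-time extraction here reduces to thresholding the scanImage TTL trace and keeping the rising edges. A minimal sketch on a synthetic trace (values are illustrative only):

import numpy as np

scan_times = np.arange(10) * 0.001                 # seconds, synthetic
signal = np.array([0, 0, 5, 5, 0, 0, 5, 5, 0, 0], dtype=float)
binarized_signal = signal > 2.7                    # TTL voltage low/high threshold
rising_edges = np.where(np.diff(binarized_signal.astype(int)) > 0)[0]
frame_times = scan_times[rising_edges]             # [0.001, 0.005]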
Example #7
    def make(self, key):

        print(f'Populating trials for {key}')

        # Get olfactory h5 path and filename
        olfactory_path = (OdorSession & key).fetch1('odor_path')
        local_path = lab.Paths().get_local_path(olfactory_path)
        filename_base = (OdorRecording & key).fetch1('filename')
        digital_filename = os.path.join(local_path, filename_base + '_D_%d.h5')

        # Load olfactory data
        digital_data = h5.read_digital_olfaction_file(digital_filename)

        # Check valve data ends with all valves closed
        if digital_data['valves'][-1] != 0:
            msg = f'Error: Final valve state is open! Ending time cannot be calculated for {key}.'
            raise PipelineException(msg)

        valve_open_idx = np.where(digital_data['valves'] > 0)[0]
        trial_valve_states = digital_data['valves'][valve_open_idx]
        trial_start_times = h5.ts2sec(digital_data['ts'][valve_open_idx])

        # Shift start indices by one to get end indices
        trial_end_times = h5.ts2sec(digital_data['ts'][valve_open_idx + 1])

        # All keys are appended to a list and inserted at the end to prevent errors from halting mid-calculation
        all_trial_keys = []

        # Find all trials and insert a key for each channel open during each trial
        for trial_num, (state, start, stop) in enumerate(
                zip(trial_valve_states, trial_start_times, trial_end_times)):

            valve_array = OdorTrials.convert_valves(state)

            # valve_array is already boolean; iterate over all open (True) valves
            for valve_num in np.where(valve_array)[0]:

                # We start counting valves at 1, not 0 like python indices
                valve_num = valve_num + 1
                trial_key = [
                    key['animal_id'], key['odor_session'],
                    key['recording_idx'], trial_num, valve_num, start, stop
                ]
                all_trial_keys.append(trial_key)

        self.insert(all_trial_keys)

        print(f'{valve_open_idx.shape[0]} odor trials found and inserted for {key}.\n')
Example #8
    def make(self, key):

        print(f'Populating Respiration for {key}')

        # Get olfactory h5 path and filename
        olfactory_path = (OdorSession & key).fetch1('odor_path')
        local_path = lab.Paths().get_local_path(olfactory_path)
        filename_base = (OdorRecording & key).fetch1('filename')
        analog_filename = os.path.join(local_path, filename_base + '_%d.h5')

        # Load olfactory data
        analog_data = h5.read_analog_olfaction_file(analog_filename)
        breath_times = h5.ts2sec(analog_data['ts'], is_packeted=True)
        breath_trace = analog_data['breath']

        # Correct NaN gaps in timestamps (mistimed or dropped packets during recording)
        if np.any(np.isnan(breath_times)):
            # Raise exception if first or last frame pulse was recorded in mistimed packet
            if np.isnan(breath_times[0]) or np.isnan(breath_times[-1]):
                msg = (
                    'First or last breath happened during misstamped packets. Pulses '
                    'could have been missed: start/end of collection is unknown.'
                )
                raise PipelineException(msg)

            # Linear interpolate between nans
            nans_idx = np.where(np.isnan(breath_times))[0]
            non_nans_idx = np.where(~np.isnan(breath_times))[0]
            breath_times[nans_idx] = np.interp(nans_idx, non_nans_idx,
                                               breath_times[non_nans_idx])
            print(
                f'Largest NaN gap found: {np.max(np.abs(np.diff(breath_times[non_nans_idx])))} seconds'
            )

        # Check that frame times occur at the same period
        breath_intervals = np.diff(breath_times)
        breath_period = np.median(breath_intervals)
        if np.any(abs(breath_intervals - breath_period) > 0.15 * breath_period):
            raise PipelineException('Breath time period is irregular')

        # Error check tracing and timing match
        if breath_trace.shape[0] != breath_times.shape[0]:
            raise PipelineException('Breath timing and trace mismatch!')

        breath_key = {**key, 'trace': breath_trace, 'times': breath_times}

        self.insert1(breath_key)
        print(f'Respiration data for {key} successfully inserted.\n')
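Note: the NaN repair here is plain index-based linear interpolation; a minimal sketch on synthetic timestamps:

import numpy as np

breath_times = np.array([0.0, 0.1, np.nan, np.nan, 0.4, 0.5])  # synthetic, one NaN gap
nans_idx = np.where(np.isnan(breath_times))[0]
non_nans_idx = np.where(~np.isnan(breath_times))[0]
breath_times[nans_idx] = np.interp(nans_idx, non_nans_idx, breath_times[non_nans_idx])
# breath_times is now [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]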
Example #9
File: treadmill.py Project: zhoupc/ease
    def _make_tuples(self, key):
        # pull filename for key
        rel = experiment.Session() * experiment.Scan.BehaviorFile().proj(
            hdf_file='filename')
        info = (rel & key).fetch1()

        # replace number by %d for hdf-file reader
        tmp = info['hdf_file'].split('.')
        if '%d' not in tmp[0]:
            info['hdf_file'] = tmp[0][:-1] + '%d.' + tmp[-1]

        # read hdf file for ball data
        hdf_path = lab.Paths().get_local_path(
            "{behavior_path}/{hdf_file}".format(**info))
        data = read_video_hdf5(hdf_path)

        # read out counter timestamps and convert to seconds
        packet_length = data['analogPacketLen']
        ball_time, _ = ts2sec(data['ball'].transpose()[1], packet_length)

        # read out raw ball counts and integrate over 100 ms intervals
        ball_raw = data['ball'].transpose()[0]
        ball_time_to_raw = interp1d(ball_time, ball_raw - ball_raw[0])
        bin_times = np.arange(ball_time[0], ball_time[-1], .1)
        bin_times[-1] = ball_time[-1]
        ball_counts = np.append([0], np.diff(ball_time_to_raw(bin_times)))

        # pull treadmill specs; raise an error if more than one treadmill fits the session key
        diam, counts_per_revolution = (
            experiment.TreadmillSpecs() * experiment.Session() & key
            & 'treadmill_start_date <= session_date').fetch(
                'diameter', 'counts_per_revolution')
        if len(diam) != 1:
            raise PipelineException('Unclear which treadmill fits session key')

        # convert ball counts to cm/s for each ball time point
        cmPerCount = np.pi * diam[-1] / counts_per_revolution[-1]
        ball_time_to_vel = interp1d(bin_times, ball_counts * cmPerCount * 10)
        ball_vel = ball_time_to_vel(ball_time)

        # assign calculated properties to key
        key['treadmill_time'] = ball_time
        key['treadmill_raw'] = ball_raw
        key['treadmill_vel'] = ball_vel

        # insert and notify user
        self.insert1(key)
        self.notify({k: key[k] for k in self.heading.primary_key})
Example #10
    def _make_tuples(self, key):
        # Get behavior filename
        behavior_path = (experiment.Session() & key).fetch1('behavior_path')
        local_path = lab.Paths().get_local_path(behavior_path)
        filename = (experiment.Scan.BehaviorFile() & key).fetch1('filename')
        full_filename = os.path.join(local_path, filename)

        # Read file
        data = h5.read_behavior_file(full_filename)

        # Get counter timestamps and convert to seconds
        ts = h5.ts2sec(data['ts'], is_packeted=True)

        # Read temperature (if available) and invalidate points with unreliable timestamps
        temp_raw = data.get('temperature', None)
        if temp_raw is None:
            raise PipelineException(
                'Scan {animal_id}-{session}-{scan_idx} does not have '
                'temperature data'.format(**key))
        temp_raw[np.isnan(ts)] = float('nan')

        # Read temperature and smooth it
        temp_celsius = (temp_raw * 100 - 32) / 1.8  # F to C
        sampling_rate = int(round(
            1 / np.nanmedian(np.diff(ts))))  # samples per second
        smooth_temp = signal.low_pass_filter(temp_celsius,
                                             sampling_rate,
                                             cutoff_freq=1,
                                             filter_size=2 * sampling_rate)

        # Resample at 1 Hz
        downsampled_ts = ts[::sampling_rate]
        downsampled_temp = smooth_temp[::sampling_rate]

        # Insert
        self.insert1({
            **key, 'temp_time': downsampled_ts,
            'temperatures': downsampled_temp,
            'median_temperature': np.nanmedian(downsampled_temp)
        })
        self.notify(key)
Example #11
File: treadmill.py Project: zhoupc/ease
    def _make_tuples(self, key):

        rel = experiment.Session() * experiment.Scan.BehaviorFile().proj(
            hdf_file='filename')

        info = (rel & key).fetch1()

        # replace number by %d for hdf-file reader
        tmp = info['hdf_file'].split('.')
        if '%d' not in tmp[0]:
            info['hdf_file'] = tmp[0][:-1] + '%d.' + tmp[-1]

        hdf_path = lab.Paths().get_local_path(
            "{behavior_path}/{hdf_file}".format(**info))

        data = read_video_hdf5(hdf_path)
        packet_length = data['analogPacketLen']
        dat_time, _ = ts2sec(data['ts'], packet_length)

        dat_fs = 1. / np.median(np.diff(dat_time))

        n = int(np.ceil(0.0002 * dat_fs))
        k = np.hamming(2 * n)
        k /= -k.sum()
        k[:n] = -k[:n]

        pulses = np.convolve(
            data['scanImage'], k,
            mode='full')[n:-n + 1]  # mode='same' with MATLAB compatibility

        peaks = spaced_max(pulses, 0.005 * dat_fs)
        peaks = peaks[pulses[peaks] > 0.1 * np.percentile(pulses[peaks], 90)]
        peaks = longest_contiguous_block(peaks)

        self.insert1(dict(key, frame_times=dat_time[peaks]))
        self.notify(key)
Example #12
    def get_video_path(self):
        video_info = (experiment.Session() * experiment.Scan.PostureVideo()
                      & self).fetch1()
        video_path = lab.Paths().get_local_path(
            "{behavior_path}/{filename}".format(**video_info))
        return video_path
Example #13
    def make(self, key):
        """ Read ephys data and insert into table """
        import h5py

        # Read the scan
        print('Reading file...')
        vreso_path, filename_base = (PatchSession * (Recording() & key)).fetch1(
            'recording_path', 'file_name')
        local_path = lab.Paths().get_local_path(vreso_path)
        filename = os.path.join(local_path, filename_base + '_%d.h5')
        with h5py.File(filename, 'r', driver='family', memb_size=0) as f:

            # Load timing info
            ANALOG_PACKET_LEN = f.attrs['waveform Frame Size'][0]

            # Get counter timestamps and convert to seconds
            patch_times = h5.ts2sec(f['waveform'][10, :], is_packeted=True)

            # Detect rising edges in scanimage clock signal (start of each frame)
            binarized_signal = f['waveform'][9, :] > 2.7  # TTL voltage low/high threshold
            rising_edges = np.where(np.diff(binarized_signal.astype(int)) > 0)[0]
            frame_times = patch_times[rising_edges]

            # Correct NaN gaps in timestamps (mistimed or dropped packets during recording)
            if np.any(np.isnan(frame_times)):
                # Raise exception if first or last frame pulse was recorded in mistimed packet
                if np.isnan(frame_times[0]) or np.isnan(frame_times[-1]):
                    msg = (
                        'First or last frame happened during misstamped packets. Pulses '
                        'could have been missed: start/end of scanning is unknown.'
                    )
                    raise PipelineException(msg)

                # Fill each gap of nan values with correct number of timepoints
                frame_period = np.nanmedian(np.diff(frame_times))  # approx
                nan_limits = np.where(np.diff(np.isnan(frame_times)))[0]
                nan_limits[1::2] += 1  # limits are indices of the last valid point before the nan gap and first after it
                correct_fts = []
                for i, (start, stop) in enumerate(zip(nan_limits[::2], nan_limits[1::2])):
                    correct_fts.extend(frame_times[0 if i == 0 else nan_limits[2 * i - 1]:start + 1])
                    num_missing_points = int(round((frame_times[stop] - frame_times[start]) / frame_period - 1))
                    correct_fts.extend(np.linspace(frame_times[start], frame_times[stop], num_missing_points + 2)[1:-1])
                correct_fts.extend(frame_times[nan_limits[-1]:])
                frame_times = np.array(correct_fts)

                # Record the NaN fix
                num_gaps = int(len(nan_limits) / 2)
                nan_length = sum(nan_limits[1::2] - nan_limits[::2]) * frame_period  # secs

            ####### WARNING: FRAME INTERVALS NOT ERROR CHECKED - TEMP CODE #######
            # Check that frame times occur at the same period
            frame_intervals = np.diff(frame_times)
            frame_period = np.median(frame_intervals)
            #if np.any(abs(frame_intervals - frame_period) > 0.15 * frame_period):
            #    raise PipelineException('Frame time period is irregular')

            # Drop last frame time if scan crashed or was stopped before completion
            valid_times = ~np.isnan(patch_times[rising_edges[0]:rising_edges[-1]])  # restricted to scan period
            binarized_valid = binarized_signal[rising_edges[0]:rising_edges[-1]][valid_times]
            frame_duration = np.mean(binarized_valid) * frame_period
            falling_edges = np.where(np.diff(binarized_signal.astype(int)) < 0)[0]
            last_frame_duration = patch_times[falling_edges[-1]] - frame_times[-1]
            if (np.isnan(last_frame_duration) or last_frame_duration < 0
                    or abs(last_frame_duration - frame_duration) >
                    0.15 * frame_duration):
                frame_times = frame_times[:-1]

            ####### WARNING: NO CORRECTION APPLIED - TEMP CODE #######
            voltage = np.array(f['waveform'][1, :], dtype='float32')
            current = np.array(f['waveform'][0, :], dtype='float32')
            command = np.array(f['waveform'][5, :], dtype='float32')

            ####### WARNING: DUMMY VARIABLES - TEMP CODE #######
            vgain = 0
            igain = 0
            command_gain = 0

            self.insert1({
                **key, 'voltage': voltage,
                'current': current,
                'command': command,
                'patch_times': patch_times,
                'frame_times': frame_times,
                'vgain': vgain,
                'igain': igain,
                'command_gain': command_gain
            })
Example #14
    def _make_tuples(self, key):
        # Get behavior filename
        behavior_path = (experiment.Session() & key).fetch1('behavior_path')
        local_path = lab.Paths().get_local_path(behavior_path)
        filename = (experiment.Scan.BehaviorFile() & key).fetch1('filename')
        full_filename = os.path.join(local_path, filename)

        # Read file
        data = h5.read_behavior_file(full_filename)

        # Read counter timestamps and convert to seconds
        timestamps_in_secs = h5.ts2sec(data['ts'], is_packeted=True)

        # Detect rising edges in scanimage clock signal (start of each frame)
        binarized_signal = data['scanImage'] > 2.7  # TTL voltage low/high threshold
        rising_edges = np.where(np.diff(binarized_signal.astype(int)) > 0)[0]
        frame_times = timestamps_in_secs[rising_edges]

        # Correct NaN gaps in timestamps (mistimed or dropped packets during recording)
        if np.any(np.isnan(frame_times)):
            # Raise exception if first or last frame pulse was recorded in mistimed packet
            if np.isnan(frame_times[0]) or np.isnan(frame_times[-1]):
                msg = (
                    'First or last frame happened during misstamped packets. Pulses '
                    'could have been missed: start/end of scanning is unknown.'
                )
                raise PipelineException(msg)

            # Fill each gap of nan values with correct number of timepoints
            frame_period = np.nanmedian(np.diff(frame_times))  # approx
            nan_limits = np.where(np.diff(np.isnan(frame_times)))[0]
            nan_limits[1::2] += 1  # limits are indices of the last valid point before the nan gap and first after it
            correct_fts = []
            for i, (start, stop) in enumerate(zip(nan_limits[::2], nan_limits[1::2])):
                correct_fts.extend(frame_times[0 if i == 0 else nan_limits[2 * i - 1]:start + 1])
                num_missing_points = int(round((frame_times[stop] - frame_times[start]) / frame_period - 1))
                correct_fts.extend(np.linspace(frame_times[start], frame_times[stop], num_missing_points + 2)[1:-1])
            correct_fts.extend(frame_times[nan_limits[-1]:])
            frame_times = np.array(correct_fts)

            # Record the NaN fix
            num_gaps = int(len(nan_limits) / 2)
            nan_length = sum(nan_limits[1::2] - nan_limits[::2]) * frame_period  # secs
            experiment.Fixes.insert1(key, skip_duplicates=True)
            experiment.Fixes.IrregularTimestamps.insert1({
                **key, 'num_gaps': num_gaps,
                'num_secs': nan_length
            })

        # Check that frame times occur at the same period
        frame_intervals = np.diff(frame_times)
        frame_period = np.median(frame_intervals)
        if np.any(abs(frame_intervals - frame_period) > 0.15 * frame_period):
            raise PipelineException('Frame time period is irregular')

        # Drop last frame time if scan crashed or was stopped before completion
        valid_times = ~np.isnan(timestamps_in_secs[rising_edges[0]:rising_edges[-1]])  # restricted to scan period
        binarized_valid = binarized_signal[rising_edges[0]:rising_edges[-1]][valid_times]
        frame_duration = np.mean(binarized_valid) * frame_period
        falling_edges = np.where(np.diff(binarized_signal.astype(int)) < 0)[0]
        last_frame_duration = timestamps_in_secs[falling_edges[-1]] - frame_times[-1]
        if (np.isnan(last_frame_duration) or last_frame_duration < 0
                or abs(last_frame_duration - frame_duration) >
                0.15 * frame_duration):
            frame_times = frame_times[:-1]

        self.insert1({**key, 'frame_times': frame_times})
        self.notify(key)
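Note: the NaN gap-filling loop shared by the last few examples replaces each NaN run with evenly spaced timepoints between its valid neighbours. A minimal, self-contained sketch on synthetic frame times (one gap, 0.1 s period):

import numpy as np

frame_times = np.array([0.0, 0.1, 0.2, np.nan, np.nan, 0.5, 0.6])
frame_period = np.nanmedian(np.diff(frame_times))  # approx 0.1 s
nan_limits = np.where(np.diff(np.isnan(frame_times)))[0]
nan_limits[1::2] += 1  # last valid index before each gap, first valid index after it
correct_fts = []
for i, (start, stop) in enumerate(zip(nan_limits[::2], nan_limits[1::2])):
    correct_fts.extend(frame_times[0 if i == 0 else nan_limits[2 * i - 1]:start + 1])
    num_missing_points = int(round((frame_times[stop] - frame_times[start]) / frame_period - 1))
    correct_fts.extend(np.linspace(frame_times[start], frame_times[stop], num_missing_points + 2)[1:-1])
correct_fts.extend(frame_times[nan_limits[-1]:])
frame_times = np.array(correct_fts)  # [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6], no NaNs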