Example #1
0
        return len(self.splines)

    def __call__(self, t):
        """Evaluate all stored splines at time ``t`` shifted by ``self.t0``.

        Returns the evaluations stacked row-wise (one row per spline).
        """
        shifted = t - self.t0
        return np.vstack([spline(shifted) for spline in self.splines])


# Restriction to the subset of animals this analysis covers.
m = 'animal_id in (20505, 20322, 20457, 20210, 20892)'

# Keys of scans from those animals that have activity traces, stimulus sync,
# and a stimulus-type entry, restricted to trials showing the Trippy stimulus.
keys = (fuse.Activity() * stimulus.Sync * tune.StimulusType
        & stimulus.Trial * stimulus.Condition & m
        & 'stimulus_type = "stimulus.Trippy"').fetch('KEY')

# Work on the first matching scan only.
key = keys[0]

# `.module` presumably resolves to the pipeline module (e.g. reso/meso) that
# processed this scan — TODO confirm against fuse.Activity's definition.
pipe = (fuse.Activity() & key).module
num_frames = (pipe.ScanInfo() & key).fetch1('nframes')
# Number of distinct imaging depths (unique z values across fields).
num_depths = len(
    dj.U('z') & (pipe.ScanInfo.Field().proj('z', nomatch='field') & key))

frame_times = (stimulus.Sync() & key).fetch1('frame_times',
                                             squeeze=True)  # one per depth
# Sync may hold up to one extra partial volume of timestamps, but never fewer
# than one timestamp per recorded frame per depth.
assert num_frames <= frame_times.size / num_depths <= num_frames + 1
frame_times = frame_times[:num_depths *
                          num_frames:num_depths]  # one per volume

# Restrict to units whose mask was classified as a soma.
units = pipe.ScanSet.Unit * pipe.MaskClassification.Type & {'type': 'soma'}
spikes = pipe.Activity.Trace * pipe.ScanSet.UnitInfo & units & key

# Per-unit keys, activity traces, and their acquisition delays (ms).
trace_keys, traces, ms_delay = spikes.fetch('KEY', 'trace', 'ms_delay')

print('Done')
Example #2
0
#!/usr/local/bin/python3
from pipeline import experiment, reso, meso, fuse, stack, pupil, treadmill, posture
from stimulus import stimulus
from stimline import tune

# # Scans
# for priority in range(120, -130, -10):  # highest to lowest priority
#     next_scans = (experiment.AutoProcessing() & 'priority > {}'.format(priority) &
#                   (experiment.Scan() & 'scan_ts > "2019-01-01 00:00:00"'))

# Scans recorded since 2019 that are queued for automatic processing.
next_scans = (experiment.AutoProcessing() &
              (experiment.Scan() & 'scan_ts > "2019-01-01 00:00:00"'))

# stimulus
# Each populate() fills in missing table entries for the selected scans.
# reserve_jobs=True coordinates work across multiple workers via DataJoint's
# job-reservation table; suppress_errors=True logs failures and keeps the
# batch running instead of aborting on the first error.
stimulus.Sync().populate(next_scans, reserve_jobs=True, suppress_errors=True)
stimulus.BehaviorSync().populate(next_scans,
                                 reserve_jobs=True,
                                 suppress_errors=True)

# treadmill, pupil, posture
treadmill.Treadmill().populate(next_scans,
                               reserve_jobs=True,
                               suppress_errors=True)
pupil.Eye().populate(next_scans, reserve_jobs=True, suppress_errors=True)
pupil.FittedPupil().populate(next_scans,
                             reserve_jobs=True,
                             suppress_errors=True)
posture.Posture().populate(next_scans, reserve_jobs=True, suppress_errors=True)

# stack
stack.StackInfo().populate(stack.CorrectionChannel(),
Example #3
0
    def make(self, key):
        """Compute and insert an oracle correlation map for one scan field.

        For every stimulus condition repeated more than twice, per-trial
        responses are low-pass filtered, resampled at ~4 Hz, and correlated
        with the leave-one-out mean over repeats (the "oracle"). The
        resulting per-pixel correlation map, its p-values, and its mean are
        inserted under ``key``.
        """
        print("Populating\n", pformat(key, indent=10))
        # Clips shown more than twice among this scan's trials.
        repeats = (stimulus.Clip().aggr(stimulus.Trial() & key,
                                        repeats="count(movie_name)")
                   & "repeats > 2")
        scan, ndepths = self.load(key)

        # Per-field frame times: take every ndepths-th timestamp starting at
        # this field's (1-based) depth index.
        frame_times = (stimulus.Sync() & key).fetch1("frame_times").squeeze()
        frame_times = frame_times[key["field"] - 1::ndepths]

        # Re-reference all times to this field's first frame.
        ft_min = frame_times.min()
        frame_times = frame_times - ft_min

        # Tolerate a small (<20 frame) length mismatch between timestamps and
        # the scan by truncating both to the shorter; larger gaps are errors.
        # NOTE(review): the `0 <=` bound is redundant — np.abs is never
        # negative.
        if 0 <= np.abs(frame_times.size - scan.shape[-1]) < 20:
            print("Shortening length of frametimes and scan to the same size")
            ml = min(frame_times.size, scan.shape[-1])
            scan = scan[..., :ml]
            frame_times = frame_times[:ml]
        else:
            raise ValueError(
                "Difference in frametimes and scan length greater 20 frames")

        downsample_to = 0.250  # target sampling period in seconds (4 Hz)
        # Hamming window spanning roughly two target periods of the native
        # frame rate, normalized to unit sum => low-pass smoothing kernel.
        h = np.hamming(2 *
                       int(downsample_to // np.median(np.diff(frame_times))) +
                       1).astype(np.float32)
        h /= h.sum()

        # NOTE(review): `data_shuffle` is never filled or used below.
        oracles, data, data_shuffle = [], [], []
        *spatial_dim, T = scan.shape
        scan = scan.reshape((-1, T))  # flatten to (pixels, time)
        # permute = lambda x: x[np.random.permutation(len(x))]
        for condition in (dj.U("condition_hash") & repeats).fetch(dj.key):
            # --- load fliptimes
            trial_keys, flip_times = (stimulus.Trial() & key
                                      & condition).fetch(dj.key, "flip_times")
            # Clip every repeat to the shortest trial, re-reference to ft_min.
            l = np.min([ft.size for ft in flip_times])
            flip_times = [ft.squeeze()[:l] - ft_min for ft in flip_times]
            flip_times = [
                np.arange(ft.min(), ft.max(), downsample_to)
                for ft in flip_times
            ]  # downsample to 4 Hz

            # --- smooth trial, subsample, compute mean
            movs = []
            for ft in tqdm(flip_times, desc="Trial: "):
                # Smooth each pixel trace with the Hamming kernel, sample it
                # at the downsampled flip times, restore spatial dimensions.
                movs.append(
                    np.vstack([
                        np.interp(ft, frame_times,
                                  np.convolve(px, h, mode="same"))
                        for px in scan
                    ]).reshape(tuple(spatial_dim) + (len(ft), )))
            # mov: (repeats,) + spatial_dim + (time,)
            mov = np.stack(movs, axis=0)
            mu = mov.mean(axis=0, keepdims=True)

            r, *_, t = mov.shape
            # Leave-one-out mean over repeats: (r*mu - mov)/(r-1),
            # algebraically rearranged.
            oracle = (mu - mov / r) * r / (r - 1)
            spatial_dim = tuple(spatial_dim)
            # The [0, 3, 1, 2] transpose assumes mov is 4-D, i.e. exactly two
            # spatial dimensions (repeats, h, w, t) -> (repeats, t, h, w).
            oracles.append(oracle.transpose([0, 3, 1, 2]).astype(np.float32))
            data.append(mov.transpose([0, 3, 1, 2]).astype(np.float32))
        # Correlate each pixel's response with its leave-one-out oracle over
        # the pooled (trial, time) axes; `corr` also returns p-values.
        key["oracle_map"], key["p_map"] = corr(
            np.concatenate(data, axis=0),
            np.concatenate(oracles, axis=0),
            axis=(0, 1),
            return_p=True,
        )
        key["oracle"] = key["oracle_map"].mean()
        self.insert1(key)
Example #4
0
animals = (20505, 20322, 20457, 20210, 20892)
# Completed, synced scans from these animals that contain both Monet2 and
# Trippy stimulus trials.
# NOTE(review): the restriction repeats the animal ids as a literal string
# instead of reusing `animals` above — keep the two in sync.
sessions = (fuse.ScanDone * stimulus.Sync & 'animal_id in (20505, 20322, 20457, 20210, 20892)'
            & (stimulus.Trial * stimulus.Monet2) & (stimulus.Trial * stimulus.Trippy)).fetch('KEY')
# NOTE(review): this binding is immediately overwritten by the loop below;
# it only matters for interactive exploration.
key = sessions[2]   # pick one

cache = {}
for key in sessions:
    folder = os.path.join(os.path.abspath(data_root_path), 'sessions', dj.hash.key_hash(key)[:6])
    if os.path.isdir(folder):
        print(folder, 'already extracted')
        continue
    print('load frame times.')
    pipe = (fuse.Activity() & key).module
    num_frames = (pipe.ScanInfo() & key).fetch1('nframes')
    num_depths = len(dj.U('z') & (pipe.ScanInfo.Field().proj('z', nomatch='field') & key))
    frame_times = (stimulus.Sync() & key).fetch1('frame_times', squeeze=True)  # one per depth
    assert num_frames <= frame_times.size / num_depths <= num_frames + 1
    frame_times = frame_times[:num_depths * num_frames:num_depths]  # one per volume

    print('load and cache soma traces')
    trace_hash = dj.hash.key_hash({k: v for k, v in key.items() if k not in {'stimulus_type'}})
    archive = cache_path and os.path.join(cache_path, trace_hash + '-traces.npz')
    if archive and os.path.isfile(archive):
        # load from cache
        data = np.load(archive, allow_pickle=True)
        trace_keys = data['trace_keys']
        traces = data['traces']
        delay = data['delay']
    else:
        units = pipe.ScanSet.Unit * pipe.MaskClassification.Type & {'type': 'soma'}
        spikes = pipe.Activity.Trace * pipe.ScanSet.UnitInfo & units & key
Example #5
0
 def key_source(self):
     """Fields of synced astrocyte scans whose cinema/youtube/unreal movie
     clips were shown more than twice.
     """
     clips = stimulus.Clip() * fuse.MotionCorrection() * stimulus.Movie()
     clips = clips & 'movie_class in ("cinema", "youtube", "unreal")'
     repeated = clips.aggr(stimulus.Trial(), repeats="count(movie_name)")
     repeated = (repeated & "repeats > 2").proj()
     return AstrocyteScans.Field & stimulus.Sync() & repeated