Code Example #1
File: log.py Project: Palpatineli/lever
    def time(self) -> float:
        log_path = self.file_path.joinpath("original", "log", self.name + ".mat")
        return log_path.stat().st_mtime

    def load(self) -> SparseRec:
        log_path = self.file_path.joinpath("original", "log", self.name + ".mat")
        return load_mat(str(log_path))

input_log = Input(LogInput, "2019-04-30T18:01")

def filter_log(log: SparseRec) -> SparseRec:
    log.values[0] = devibrate(log.values[0], log.sample_rate)
    return log

task_filtered_log = Task(filter_log, "2019-04-30T18:28", "filtered-log")
res_filter_log = task_filtered_log(input_log)

def make_trial_log(log: SparseRec, params: Dict[str, float], center: str = "motion") -> SparseRec:
    lever = log.center_on(center, **params).fold_trials()
    lever.values = np.squeeze(lever.values, 0)
    lever.axes = lever.axes[1:]
    return devibrate_rec(lever, params)

task_trial_log = Task(make_trial_log, "2019-04-30T18:38", "trial-log", extra_args=(motion_params, ))
res_trial_log = task_trial_log(res_filter_log)

quiet_motion_params = {"quiet_var": 0.001, "window_size": 1000, "event_thres": 0.3, "pre_time": 1.0, "post_time": 1.0}
task_trial_log_quiet = Task(make_trial_log, "2019-04-30T18:38", "trial-log", extra_args=(quiet_motion_params, ))
res_trial_log_quiet = task_trial_log_quiet(res_filter_log)
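
A note on the pattern above: Input and Task come from the project's own caching pipeline. Each wrapper pairs a function with a timestamp string marking when the code last changed; a cached result is reused only if it is newer than both that timestamp and the upstream input's time(). A minimal sketch of the idea, assuming a pickle-on-disk cache (names and storage format here are hypothetical, not the project's API):

import pickle
import time
from pathlib import Path
from typing import Any, Callable

class MiniTask:
    """Hypothetical sketch: memoize func(arg) on disk, keyed by a code timestamp."""
    save_folder = Path("data/interim")

    def __init__(self, func: Callable, timestamp: str, name: str):
        self.func, self.name = func, name
        # the timestamp string records when the wrapped code last changed
        self.code_time = time.mktime(time.strptime(timestamp, "%Y-%m-%dT%H:%M"))

    def __call__(self, upstream_value: Any, upstream_time: float) -> Any:
        cache = self.save_folder / (self.name + ".pkl")
        if cache.exists() and cache.stat().st_mtime >= max(self.code_time, upstream_time):
            return pickle.loads(cache.read_bytes())  # cache is fresh, reuse it
        result = self.func(upstream_value)
        cache.parent.mkdir(parents=True, exist_ok=True)
        cache.write_bytes(pickle.dumps(result))
        return result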
Code Example #2
    max_cutoff = int(round(trial_log.sample_rate * peak_window[1]))
    max_idx = trial_log.values[:, cutoff:max_cutoff].argmax(axis=1) + cutoff
    amplitude = (trial_log.values[range(trial_log.shape[0]), max_idx]
                 - trial_log.values[:, :cutoff].mean(axis=1))
    speed = np.array([
        np.diff(x[5:max(idx, cutoff * 2)]).max()
        for x, idx in zip(trial_log.values, max_idx)
    ])
    reliability = _reliability(trial_log.values[:, cutoff:max_cutoff])
    return amplitude, speed, delay, hit_rate, reliability


peak_window = (motion_params['pre_time'], 0.3)
task_behavior = Task(get_behavior,
                     "2019-06-21T13:02",
                     "delay-hitrate",
                     extra_args=(peak_window, ))
res_behavior = task_behavior(res_filter_log)
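
On synthetic data, the peak/baseline extraction in get_behavior reduces to a few NumPy operations. The toy sketch below mirrors the visible slicing; the array shape and the cutoff values are illustrative assumptions:

import numpy as np

rng = np.random.default_rng(0)
values = rng.normal(size=(8, 300))        # 8 trials x 300 samples, stand-in for trial_log.values
cutoff, max_cutoff = 100, 180             # baseline before `cutoff`, peak search up to `max_cutoff`
max_idx = values[:, cutoff:max_cutoff].argmax(axis=1) + cutoff  # per-trial peak index
baseline = values[:, :cutoff].mean(axis=1)                      # pre-window mean
amplitude = values[range(values.shape[0]), max_idx] - baseline  # peak height above baseline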


##
def main():
    result = get_result(mice.name.to_list(), [res_behavior],
                        "astrocyte_exp_log")[0]
    return result


def merge_behavior(result: List[Tuple[np.ndarray, np.ndarray, np.ndarray,
                                      np.ndarray, float]]):
    grouping: pd.DataFrame = pd.read_csv(
        proj_folder.joinpath("data", "index", "grouping.csv"))  # type: ignore
Code Example #3
    onset = np.rint(trace.timestamps * 5 / 256).astype(int)
    trajectory: np.ndarray = take_segment(trace.values, onset, trial_samples)
    speed: np.ndarray = take_segment(np.diff(trace.values), onset - 1,
                                     trial_samples)
    y = np.array([
        take_segment(neuron, onset, trial_samples) for neuron in spike.values
    ])
    preds = Predictors((0, delay_sample), [delay_period], [hit, delay],
                       [trajectory, speed])
    grouping = Grouping([1, 2], [3], [4, 5], [6, 7])
    preds, grouping = build_predictor(preds, grouping, hit, spline)
    return preds, y, grouping


task_predictor = Task(get_predictor,
                      "2020-02-20T13:04",
                      "encoding-predictor-minimal",
                      extra_args=(5, bspline_set(np.arange(7), 2)))
res_predictor = task_predictor([res_behavior, res_align_xy])

task_encoding = Task(run_encoding, "2020-02-24T09:29", "encoding-r2-minimal")
res_encoding = task_encoding(res_predictor)

from encoding_model.main import build_model
task_model = Task(build_model, "2019-06-27T21:18", "encoding-model")
res_model = task_model(res_predictor)
predictor_names = [
    "start", "reward", "isMoving", "hit", "delay", "trajectory", "speed", "all"
]


##
Code Example #4
Task.save_folder = proj_folder.joinpath("data", "interim")
mice: pd.DataFrame = pd.read_csv(
    proj_folder.joinpath("data", "index", "index.csv")).set_index(["id", "session"])  # type: ignore


def make_trial_neuron(trial_log: SparseRec,
                      spike_framerate: Tuple[Dict[str, np.ndarray], float]) -> DataFrame:
    spikes, frame_rate = spike_framerate
    # trial_neurons should be [neuron, trial, time_points]
    return fold_by(DataFrame.load(spikes), trial_log, frame_rate, True)


task_trial_neuron = Task(make_trial_neuron, "2019-05-02T16:27", "trial-neuron")
res_trial_neuron = task_trial_neuron([res_trial_log, res_spike])


def scale(x: np.ndarray) -> np.ndarray:
    """x should not be larger than 3D. scale on last axis"""
    std = x.std(axis=-1, keepdims=True)
    mask = std == 0
    std[mask] = 1
    z = (x - x.mean(axis=-1, keepdims=True)) / std
    tile_size = ((z.shape[0], ) if z.ndim == 1 else
                 (1, z.shape[1]) if z.ndim == 2 else
                 (1, 1, z.shape[2]))
    z[np.tile(mask, tile_size)] = 0
    return z
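
A quick usage check: rows with zero variance come out as all zeros rather than NaN, which is the point of the mask bookkeeping above.

x = np.array([[1.0, 2.0, 3.0],
              [5.0, 5.0, 5.0]])  # second row is constant, so its std is 0
print(scale(x))
# approximately:
# [[-1.2247  0.      1.2247]
#  [ 0.      0.      0.    ]]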

Code Example #5
    return session

input_tif = Input(AlignmentInput, "2015-01-01T00:00", "tif")

class AlignmentCache(FileObj):
    def save(self, obj):
        obj.save(self.file_path, draw_limit=True)

    def load(self) -> Alignment:
        return Alignment.load(str(self.file_path))

    def time(self) -> float:
        displacement = self.file_path.joinpath("displacement.npy")
        return displacement.stat().st_mtime if displacement.exists() else 0

task_align = Task(make_align, "2018-04-30T15:12", "align", file_cacher=AlignmentCache)
res_align = task_align(input_tif)

class RoiInput(InputObj):
    def time(self) -> float:
        zips = list(self.file_path.joinpath("interim", "align", self.name).glob("*.zip"))
        if len(zips) == 0:
            return 0
        return sorted(zip_file.stat().st_mtime for zip_file in zips)[-1]

    def load(self) -> Union[float, List[Roi]]:
        zips = list(self.file_path.joinpath("interim", "align", self.name).glob("*.zip"))
        if len(zips) == 0:
            return 0
        return read_roi_zip(str(sorted((zip_file.stat().st_mtime, zip_file) for zip_file in zips)[-1][1]))
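
As a stylistic aside (not the project's code), the sort-and-take-last idiom in both methods can be a single pass with max and a key function:

from pathlib import Path

def newest(folder: Path, pattern: str) -> Path:
    # one pass over the matches instead of building and sorting a list
    return max(folder.glob(pattern), key=lambda p: p.stat().st_mtime)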
Code Example #6
    def time(self) -> float:
        targets = list(
            self.file_path.joinpath("input-1").glob(self.name + "+*"))
        if len(targets) == 0:
            return 0
        return sorted(entry.stat().st_mtime for entry in targets)[-1]


input_1 = Input(Input1, "2019-04-26T17:12")


def step_2(x):
    return int(float(x)) + 3


task_2 = Task(step_2, "2019-04-26T17:12", file_cacher=AlignedImageCache)


class Input3(InputObj):
    def load(self, *args) -> Any:
        targets = list(
            self.file_path.joinpath("input-3").glob(self.name + "x*"))
        return sorted((entry.stat().st_mtime, int(entry.name.split('x')[1]))
                      for entry in targets)[-1][1]

    def time(self) -> float:
        targets = list(
            self.file_path.joinpath("input-3").glob(self.name + "x*"))
        if len(targets) == 0:
            return 0
        return sorted(entry.stat().st_mtime for entry in targets)[-1]
Code Example #7
mice: pd.DataFrame = pd.read_csv(
    proj_folder.joinpath("data", "index", "index.csv")).set_index(["id", "session"])  # type: ignore
grouping: pd.DataFrame = pd.read_csv(
    proj_folder.joinpath("data", "index", "grouping.csv")).set_index(["id", "session"])  # type: ignore


def make_dtw_hierarchy(trial_log: SparseRec) -> np.ndarray:
    return linkage(
        [fastdtw(x, y)[0] for x, y in combinations(trial_log.values, 2)])


task_dtw_hierarchy = Task(make_dtw_hierarchy, "2019-05-02T17:35",
                          "dtw-hiearchy")
res_linkage = task_dtw_hierarchy(log.res_trial_log)
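
scipy.cluster.hierarchy.linkage accepts exactly what the comprehension above produces: a condensed distance vector with one DTW distance per trial pair. A self-contained check, assuming the fastdtw package:

import numpy as np
from itertools import combinations
from scipy.cluster.hierarchy import linkage
from fastdtw import fastdtw

trials = np.random.randn(5, 100)                                # 5 toy trials, 100 samples each
dists = [fastdtw(x, y)[0] for x, y in combinations(trials, 2)]  # 5 * 4 / 2 = 10 pairwise distances
tree = linkage(dists)                                           # (n - 1) x 4 linkage matrix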


def make_threshold(linkage_mat: np.ndarray) -> float:
    return get_threshold(linkage_mat)


task_threshold = Task(make_threshold, "2019-05-02T17:46", "get-threshold")
res_threshold = task_threshold(res_linkage)
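
The truncated make_cluster below presumably flattens the dendrogram at that threshold; in scipy this is a single fcluster call (an assumption about the elided body, not confirmed project code):

from scipy.cluster.hierarchy import fcluster

# cut the tree where the cophenetic distance exceeds the threshold
labels = fcluster(linkage_mat, t=threshold, criterion="distance")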


def make_cluster(linkage_mat: np.ndarray, threshold: float) -> np.ndarray:
    """
    Args:
        linkage_mat: N x 4 linkage mat from scipy.cluster.hierarchy.linkage
Code Example #8
    "post_time": 1.9
}


def make_sample_neurons(spike_framerate: Tuple[Dict[str, np.ndarray], float],
                        log: SparseRec, params: Dict[str, float]) -> SparseRec:
    lever = log.center_on("motion", **params).fold_trials()
    lever.values = np.squeeze(lever.values, 0)
    lever.axes = lever.axes[1:]
    filtered = devibrate_rec(lever, params)
    spikes, frame_rate = spike_framerate
    return fold_by(DataFrame.load(spikes), filtered, frame_rate, True)


task_trial_neuron = Task(make_sample_neurons,
                         "2020-09-01T23:04",
                         "trial-neuron-2s",
                         extra_args=(motion_params, ))
res_trial_neuron = task_trial_neuron([res_spike, res_filter_log])


##
def main():
    trial_neurons = get_result(mice.name.to_list()[0:1], [res_trial_neuron],
                               'trial-neuron-2s-run')
    values = trial_neurons[0][0].values
    with Figure(fig_folder.joinpath("classifier", "example-neurons.svg"),
                show=True) as axes:
        for id_neuron, neuron in enumerate(values[:20, 0:4, :]):
            for id_trial, trial in enumerate(neuron):
                axes[0].plot(range(id_trial * 11, id_trial * 11 + 10),
                             trial / trial.max() * 5 + id_neuron * 6,
Code Example #9
File: decoder.py Project: Palpatineli/lever
    Returns:
        X: spikes scaled
        y: lever trajectory resampled to the sample rate of spikes
    """
    spike, sample_rate = spike_sample_rate
    resampled_trace = InterpolatedUnivariateSpline(
        filterd_log.axes[1], filterd_log.values[0])(spike['y'])
    y = filterd_log.create_like(scale_features(resampled_trace),
                                [spike['y']])  # type: ignore
    y.sample_rate = sample_rate
    spike_df = DataFrame(scale_features(spike['data'], axes=1),
                         [spike['x'], spike['y']])  # type: ignore
    return spike_df, y


task_align_xy = Task(align_XY, "2019-05-23T18:37", "align-xy")
res_align_xy = task_align_xy([res_spike, res_filter_log])
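
Evaluating InterpolatedUnivariateSpline at new timestamps is what resamples the lever trace onto the spike frames above; a toy version with made-up rates:

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

t_lever = np.linspace(0.0, 1.0, 256)   # lever trace sampled at 256 Hz (toy)
trace = np.sin(2 * np.pi * 3.0 * t_lever)
t_frames = np.linspace(0.0, 1.0, 20)   # 20 imaging frames over the same second
resampled = InterpolatedUnivariateSpline(t_lever, trace)(t_frames)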


def neuron_info(spike_trajectory: Tuple[DataFrame, SparseRec],
                svr_params: Dict[str, float]) -> np.ndarray:
    """Give the prediction power of individual neurosn on push trajectory predicted in a rbf SVR."""
    spike, trajectory = spike_trajectory
    y = trajectory.values
    X = spike.values
    svr = SVR(kernel='rbf', **svr_params)
    y_hat_list = [
        svr.fit(n.reshape(-1, 1), y).predict(n.reshape(-1, 1)) for n in X
    ]
    # y_real, y_hat_array = list(), list()
    # for X_tr, y_tr, X_te, y_te in split_time_series(X, y, 10):