def full_trace(record_file: File, ax: Axes):
    """Plot the scaled spike trace of every neuron plus the resampled lever trace."""
    spikes = DataFrame.load(record_file["spike"])
    lever = load_mat(record_file['response'])
    # bring the lever trajectory down to the imaging frame rate
    resampled = resample(lever.values[0], lever.sample_rate,
                         record_file.attrs['frame_rate'])
    for row in range(spikes.shape[0]):
        ax.plot(_scale(spikes.values[row, :] / 5))
    ax.plot(_scale(resampled) / 2 - 3, color=COLORS[0])
def svr_parameters(data_file: File, info: Dict[str, str]):
    """Scan SVR (gamma, C) pairs, save the mutual-information table and draw a heatmap."""
    lever = load_mat(data_file['response'])
    trace = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    # resample the lever onto the spike timestamps, dropping the first sample
    y = InterpolatedUnivariateSpline(lever.axes[0], trace)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    gammas = np.linspace(-8, -5, 12, endpoint=False)
    Cs = np.linspace(3, 15, 12, endpoint=False)

    def pred(gamma, C):
        # gamma is scanned on a log10 scale
        factory = svr.predictor_factory(y, gamma=10 ** gamma, C=C, epsilon=1E-3)
        hat = cross_predict(X, y, factory, section_mi=False)
        return mutual_info(y, hat)

    res = map_table(pred, gammas, Cs)
    save_path = join(res_folder,
                     f"svr_params_test_{info['id']}_{info['session']}.npz")
    np.savez_compressed(save_path, values=np.asarray(res), axes=[gammas, Cs])
    res_df = DataFrame(np.asarray(res), [gammas, Cs])
    with Figure() as (ax, ):
        labeled_heatmap(ax, res_df.values, res_df.axes[1], res_df.axes[0])
    print('done')
def compare_decoder():
    """Run four decoders on one recording and overlay each prediction on the trajectory."""
    import matplotlib.pyplot as plt
    data_file = files['wt'][2]
    lever = load_mat(data_file['response'])
    y = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])(
        data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    bounds: Bounds = (tuple(np.quantile(y, [0.001, 0.999])), (-2, 1), (-5, 5))  # type: ignore
    print("mutual info:")
    plt.plot(y, color=COLORS[0], alpha=0.5, label="trajectory")
    path_hats = dict()
    powers = dict()
    decoders = (("particle", particle.decoder_factory(bounds)),
                ("kalman", kalman.decoder_factory(bounds)),
                ("linear", linear.decoder_factory(bounds)),
                ("svr", svr.decoder_factory(SVR('rbf', 3, 1E-7, cache_size=1000))))
    for color, (name, decoder) in zip(COLORS[1:], decoders):
        path_hat, power = cross_predict(X, y, decoder)
        info = mutual_info(y, path_hat)
        path_hats[name] = path_hat
        powers[name] = power
        plt.plot(path_hat, color=color, alpha=0.5,
                 label="{}: {}".format(name, info))
    plt.legend()
def decoder_power(data_file: File,
                  predictor_factory: Callable[[np.ndarray], Decoder]) -> float:
    """Mutual information between the lever trajectory and its cross-validated prediction."""
    lever = load_mat(data_file['response'])
    spline = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])
    y = spline(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    lever_hat, _ = cross_predict(X, y, predictor_factory(y))
    return mutual_info(lever_hat, y)
def get_hitrate(data_file: File) -> float:
    """Fraction of trials marked correct by find_response_onset for one session."""
    lever = load_mat(data_file['response'])
    wanted = ('quiet_var', 'window_size', 'event_thres')
    params = {key: value for key, value in motion_params.items() if key in wanted}
    correct_trials = find_response_onset(lever, **params)[3]
    return correct_trials.mean()
def all_traces(record_file: File, ax: Axes):
    """plot full traces of all neurons and trial onsets"""
    lever = load_mat(record_file["response"])
    traces = _scale(DataFrame.load(record_file["measurement"]).values)
    duration = lever.shape[1] / lever.sample_rate
    time = np.linspace(0, duration, lever.shape[1])
    # lever trajectory below the stack of neuron traces
    ax.plot(time, _scale(lever.values[0]) - 5, COLORS[1])
    for idx, row in enumerate(traces):
        ax.plot(time, row + idx * 5)
    # trial onsets
    for onset in lever.timestamps / lever.sample_rate:
        ax.axvline(x=onset, color=COLORS[2])
def test_loadmat():
    """Sanity-check load_mat against the example lever log shipped with the package."""
    data_dir = resource_filename(Requirement.parse("lever"), "lever/test/data")
    log = load_mat(join(data_dir, "lever.mat"))
    assert log.trial_time == 5
    assert log.timestamps.shape[0] == 117
    assert log.sample_rate == 256
    assert log.stimulus['config']['reward'] == 6
    assert abs(log.stimulus['config']['lev_baseline'] - 3.94221418) < 1E-6
def get_delay(data_file: File) -> float:
    """Mean latency, in seconds, from stimulus onset to motion onset over correct trials."""
    lever = load_mat(data_file['response'])
    keys = ('quiet_var', 'window_size', 'event_thres')
    params = {k: v for k, v in motion_params.items() if k in keys}
    motion_onsets, stim_onsets, _, correct_trials, _ = find_response_onset(
        lever, **params)
    delays = (motion_onsets - stim_onsets[correct_trials]) / lever.sample_rate
    return np.mean(delays)
def null_fit(data_file):
    """Build a null distribution of decoding performance by shuffling the trajectory.

    Decodes 200 permutations of the lever trajectory with the SVR predictor and
    saves the resulting mutual-information scores under report/measure/null_fit.npz.
    """
    lever = load_mat(data_file['response'])
    y = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])(
        data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    res = list()
    for _ in range(200):
        # np.random.permutation already returns a shuffled copy, so no .copy() needed
        y_perm = np.random.permutation(y)
        y_hat_perm, _ = cross_predict(X, y_perm, svr.predictor_factory(y_perm))
        res.append(mutual_info(y_perm, y_hat_perm))
    # BUG FIX: the original path ended with an empty component, so savez_compressed
    # wrote a hidden ".npz" file with no base name inside the measure folder.
    np.savez_compressed(join(project_folder, "report", "measure", "null_fit.npz"),
                        res=res)
def show_traces():
    """Plot the true lever trajectory (blue) against the in-sample SVR fit (red).

    Prints the mutual information between trajectory and fit.
    """
    data_file = files['wt'][0]
    lever = load_mat(data_file['response'])
    values = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    y = InterpolatedUnivariateSpline(lever.axes[0], values)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    # BUG FIX: the original ran a full cross_predict here and then immediately
    # overwrote its result with the in-sample fit below, wasting the whole
    # cross-validation run; the dead call is removed (plots/prints unchanged).
    lever_hat = svr.predictor_factory(y)(X, y, X)
    plt.plot(y, color='blue')
    plt.plot(lever_hat, color='red')
    print("mutual info: ", mutual_info(y, lever_hat))
def get_linkage(record_file: File, params: Dict[str, float]) -> Tuple[np.ndarray, np.ndarray]:
    """Load record file and get linkage matrix between the trajectories of trials.

    Args:
        record_file: the record file with at least the lever file
        params: param dict including window size, push threshold and
            'pre_time' (used when devibrating trials)
    Returns:
        (linkage matrix computed from the trial distance matrix,
         boolean mask of the trials that survived devibration)
    """
    lever = load_mat(record_file['response'])
    lever.center_on("motion", **params)
    lever.fold_trials()
    mask, lever_trials = devibrate_trials(lever.values[0], params['pre_time'],
                                          sample_rate=lever.sample_rate)
    dist_mat = trace_cluster(lever_trials[mask, ...])
    # BUG FIX: the annotation claimed a single np.ndarray, but two values
    # (linkage matrix and mask) are returned; docstring also named a
    # non-existent "motion_params" argument.
    return linkage(dist_mat), mask
def svr_power(data_file: File, neuron_no: int = 20) -> Tuple[float, List[float]]:
    """Decode with the ``neuron_no`` strongest neurons; return overall and per-neuron power."""
    lever = load_mat(data_file['response'])
    trace = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    y = InterpolatedUnivariateSpline(lever.axes[0], trace)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:].copy()
    decoder = svr.predictor_factory(y, gamma=3E-9, C=12, epsilon=1E-3)
    # decoding power of each neuron on its own
    single_power = list()
    for neuron in X:
        power = cross_predict(neuron[newaxis, :], y, decoder, section_mi=True)[1].mean()
        single_power.append(power)
    # keep only the neuron_no best single-neuron decoders
    threshold = sorted(single_power)[-neuron_no]
    mask = np.greater_equal(single_power, threshold)
    path_hat, _ = cross_predict(X[mask, :], y, decoder)
    return mutual_info(y, path_hat), single_power
def show_correspondance(ax: Axes, record_file: File, motion_params: Dict[str, float]):
    """Overlay the z-scored neural trial trace (green) on the downsampled lever trace (red)."""
    def zscore(arr):
        arr -= arr.mean()
        arr /= arr.std()
        return arr
    lever = load_mat(record_file['response'])
    lever.center_on("motion", **motion_params)
    activity = DataFrame.load(record_file["measurement"])
    neuron_rate = record_file.attrs['frame_rate']
    trials = filter_empty_trials(ts.fold_by(activity, lever, neuron_rate, True))
    # downsample the folded lever trials to the imaging rate and match trial count
    slow_lever = ts.resample(lever.fold_trials().values, lever.sample_rate,
                             neuron_rate, -1)[:, 0:trials.shape[1], :]
    ax.plot(zscore(trials.values[0].reshape(-1)), 'green')
    ax.plot(zscore(slow_lever.reshape(-1)), 'red')
def example_traces(ax: Axes, record_file: File, start: float, end: float, cells: Set[int]):
    """Visualize calcium trace of cells and the lever trajectory.

    Plots the lever trajectory between `start` and `end` (seconds), then stacks
    the calcium traces of the neurons listed in `cells` above it, and marks
    trial timestamps with vertical lines.
    """
    lever_trajectory = load_mat(record_file["response"])
    calcium_trace = DataFrame.load(record_file["measurement"])
    neuron_rate = record_file.attrs['frame_rate']
    # convert the [start, end] window (seconds) into sample indices for each stream
    l_start, l_end = np.rint(np.multiply([start, end], lever_trajectory.sample_rate)).astype(np.int_)
    c_start, c_end = np.rint(np.multiply([start, end], neuron_rate)).astype(np.int_)
    ax.plot(np.linspace(0, l_end - l_start, l_end - l_start),  # lever trajectory
            _scale(lever_trajectory.values[0][l_start: l_end]), COLORS[1])
    # NOTE(review): this axis spans the *calcium* duration but uses the *lever*
    # point count, and is then indexed with calcium sample indices — looks like
    # mixed units/lengths; confirm against the intended x-axis.
    time = np.linspace(0, calcium_trace.shape[1] / neuron_rate, lever_trajectory.shape[1])
    # vertical offsets so the selected traces stack without overlapping
    spacing = iter(range(0, 500, 2))
    for idx, row in enumerate(calcium_trace.values):
        if idx in cells:
            # NOTE(review): l_start is a lever *sample index* subtracted from a
            # time axis in seconds — presumably meant to align to the window
            # start; verify the units.
            ax.plot(time[c_start: c_end] - l_start, _scale(row[c_start: c_end]) + next(spacing))
    # timestamps inside the window, shifted so the window starts at zero
    stim_onsets = lever_trajectory.timestamps[
        (lever_trajectory.timestamps > l_start) & (lever_trajectory.timestamps < l_end)]\
        / lever_trajectory.sample_rate - l_start
    for x in stim_onsets:
        ax.axvline(x=x, color=COLORS[2])
def examine_saline(data_file):
    """Compare SVR decoding variants on one recording and print their MI scores.

    Plots the true trajectory (blue), the in-sample fit (green), the
    cross-validated prediction from all neurons (red), and the cross-validated
    prediction from the 20 best single neurons (orange).
    """
    # BUG FIX: the parameter was immediately overwritten by a hard-coded
    # dredd_files['cno'][5], making data_file dead; the override is removed so
    # the caller-supplied file is actually examined.
    lever = load_mat(data_file['response'])
    values = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    y = InterpolatedUnivariateSpline(lever.axes[0], values)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    decoder = svr.predictor_factory(y, gamma=3E-7, C=11, epsilon=1E-3)
    # decoding power of each neuron alone
    single_power = [
        mutual_info(y, cross_predict(x[newaxis, :], y, decoder, section_mi=False))
        for x in X]
    hat_0 = cross_predict(X, y, decoder, section_mi=False)
    mask = np.greater_equal(single_power, sorted(single_power)[-20])
    hat_1 = cross_predict(X[mask, :], y, decoder, section_mi=False)
    plt.plot(y, color='blue')
    hat = decoder(X, y, X)
    plt.plot(hat, color='green')
    plt.plot(hat_0, color='red')
    plt.plot(hat_1, color='orange')
    print("hat_1: ", mutual_info(hat_1, y), " hat_0: ", mutual_info(hat_0, y))
def load(self) -> SparseRec:
    """Load this recording's lever log from original/log/<name>.mat."""
    log_path = self.file_path.joinpath("original", "log", self.name + ".mat")
    return load_mat(str(log_path))
def get_trials(data_file: File, motion_params: MotionParams) -> SparseRec:
    """Return the lever record centered on motion onsets and folded into trials."""
    lever = load_mat(data_file['response'])
    lever = lever.center_on("motion", **motion_params).fold_trials()
    # drop the leading singleton axis left over from folding
    lever.values = np.squeeze(lever.values, 0)
    lever.axes = lever.axes[1:]
    return lever