def gaussian_ellipse_place_cell(df, Xcenter, Ycenter, sigma_x, sigma_y, bat_name=0):
    """Simulate a stochastic place cell with an elliptical Gaussian field.

    The spike probability decays as a 2-D Gaussian centered at
    (Xcenter, Ycenter) with per-axis widths sigma_x / sigma_y, and a spike
    is drawn per frame by comparing against uniform random noise.

    bat_name may be a list, in which case the cell fires for any of the
    listed bats (bitwise OR of the individual spike trains).
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= gaussian_ellipse_place_cell(
                df, Xcenter, Ycenter, sigma_x, sigma_y, single_bat)
        return combined
    name = str(bat_name)
    x = df[dataset.get_col_name(name, "X")]
    y = df[dataset.get_col_name(name, "Y")]
    # Halve both sigmas (same convention as gaussian_place_cell).
    half_sx = sigma_x / 2
    half_sy = sigma_y / 2
    exponent = ((x - Xcenter)**2 / (2 * half_sx**2)
                + (y - Ycenter)**2 / (2 * half_sy**2))
    spike_prob = np.exp(-exponent)
    return (spike_prob > np.random.random(len(spike_prob))).astype('int')
def add_pairwise_features(df, other_bats):
    """Add multiplicative pairwise features for every unordered pair of bats.

    For each feature name f found on BAT_1..BAT_9 "_F_" columns, adds PAIR
    columns holding element-wise products of the two bats' values, for both
    f and f^2, so a linear model can express conjunctive (OR/AND-like)
    neurons:
    # to support OR/AND neurons:
    # (x_1)^2 * (x_2^2)
    # (x_1)^2 * (x_2)
    # (x_1) * (x_2)^2
    # (x_1) * (x_2)
    # to support AND neurons:
    # (x_1-x_2)^2 + (y_2-y_1)^2 (pairwise distance)

    Returns the mutated df and the list of pair names that were created.
    NOTE(review): assumes squared columns are not yet present when this runs;
    otherwise f + "^2" could request names like "X^2^2" — confirm call order.
    """
    # Extract the bare feature names from BAT_1..BAT_9 columns (BAT_0 excluded
    # by the character class in the regex).
    f_names = map(lambda x: re.findall("BAT_[1-9]_F_(.*?)$", x), df.columns)
    f_names_flat = list(set([item for sublist in f_names for item in sublist]))
    pair_bats_names = []
    for bat_name1 in other_bats:
        for bat_name2 in other_bats:
            # Visit each unordered pair exactly once.
            if bat_name1 <= bat_name2:
                continue  # or bat_name1 == "0" or bat_name2 == "0": continue
            pair_name = bat_name1 + bat_name2
            pair_bats_names.append(pair_name)
            for f_name_ in f_names_flat:
                # Product of the plain and the squared variant of the feature.
                for f_name in [f_name_, f_name_ + "^2"]:
                    v1 = df[dataset.get_col_name(bat_name1, f_name)]
                    v2 = df[dataset.get_col_name(bat_name2, f_name)]
                    df[dataset.get_col_name(pair_name, f_name, "PAIR")] = v1 * v2
    return df, pair_bats_names
def hd_plot_1d(df, neuron, model_spikes, ax=None):
    """Plot 1-D head-direction tuning curves (spikes normalized by occupancy)
    for the recorded neuron and the model's predicted spikes on one axis.

    df: dataframe with a BAT_0 "HD" column in degrees.
    neuron / model_spikes: per-frame binary spike indicators aligned to df.
    """
    if ax is None:
        fig, ax = plt.subplots()
    # .where() keeps the full index and fills non-spike rows with NaN;
    # np.histogram below then drops the NaNs from the counts.
    df_neuron = df.where(neuron.astype('bool'))
    df_model_spikes = df[model_spikes.astype('bool')]
    hd_spikes_radians = df_neuron[dataset.get_col_name(0, "HD")] / 180 * np.pi
    # ax.set_yticks([])
    bins = 36  # 10-degree bins over the full circle
    behavior = df[dataset.get_col_name(0, "HD")] / 180 * np.pi
    # Occupancy histogram: how often each head direction was visited.
    behavior_map = np.histogram(behavior,
                                bins=np.linspace(0, 1, bins) * 2 * np.pi)[0]
    hd_spikes_radians_map = np.histogram(
        hd_spikes_radians, bins=np.linspace(0, 1, bins) * 2 * np.pi)[0]
    model_spikes = df_model_spikes[dataset.get_col_name(0, "HD")] / 180 * np.pi
    model_spikes_map = np.histogram(
        model_spikes, bins=np.linspace(0, 1, bins) * 2 * np.pi)[0]
    #print(hd_spikes_radians_map)
    #print(behavior_map)
    #print(hd_spikes_radians_map / behavior_map)
    #print(model_spikes_map / behavior_map)
    #ax.hist(behavior_map, bins=36, color="#cccccc", density=True)
    #ax.hist(hd_spikes_radians, bins=36, color="red", density=True)
    # Normalize spike counts by occupancy to get a rate-per-frame curve.
    ax.plot(hd_spikes_radians_map / behavior_map)
    ax.plot(model_spikes_map / behavior_map)
    ax.set_xticks(list(range(0, 37, 12)))
    ax.set_yticks(np.linspace(0, 0.6, 4))
    ax.set_xticklabels([x * 10 for x in ax.get_xticks()
                        ])  # multiply by 10 since we have 36 bins.
    ax.set_yticklabels([y * 25 for y in ax.get_yticks()])
    ax.set_title("HD spikes-rate per frame")
def add_nearest_distance(df):
    """Add the distance to the nearest other bat ('nD') and its square ('nD^2').

    The nearest distance is the row-wise minimum over every column ending in
    '_D'. Returns the mutated df.
    """
    # dropna() keeps only rows where every distance is present; the
    # assignment re-aligns on the index, so dropped rows end up NaN.
    dist_cols = df.columns[df.columns.str.endswith('_D')]
    nearest = df[dist_cols].dropna().min(axis=1)
    df[dataset.get_col_name('0', 'nD', 'BAT')] = nearest
    # BUG FIX: 'nD^2' was previously assigned a plain copy of 'nD';
    # it must be the square, consistent with every other *^2 feature.
    df[dataset.get_col_name('0', 'nD^2', 'BAT')] = nearest**2
    return df
def agg_importance_by_meaning(imp, names):
    """Aggregate per-feature importances into semantic groups per entity.

    imp: iterable of importance values, aligned with names.
    names: feature (column) names like "BAT_i_F_X", "PAIR_ij_F_...".

    For every non-BAT_0 entity, sums the importances of its positional
    (X, Y, X^2, Y^2) and ego-centric (A, D, A^2, D^2) features; for BAT_0,
    sums head-direction (HD, HD^2) and nearest-distance (nD, nD^2) instead.
    Aggregated features are popped out of the per-feature dict; anything
    not aggregated passes through unchanged.

    Returns (values, names) of the merged importance dict.
    """
    feature_importance_dict = dict(zip(names, imp))
    import re
    feature_names = list(feature_importance_dict.keys())
    # Entity = the "PREFIX_ID" part of the column name, e.g. "BAT_3".
    entities = sorted(
        list(set(map(lambda x: re.findall('^(.*?_.*?)_', x)[0],
                     feature_names))))
    # print("agg_importance_by_meaning(), bats:", bats)
    # bat_i- position (X, Y)
    # bat_i - ego-centric position (A, D)
    ret_d = {}
    # agg_map = {"position": ["X", "Y", "X^2", "Y^2", "X^0.5", "Y^0.5"], \
    #            "ego-centric": ["A", "D", "A^2", "D^2", "A^0.5", "D^0.5"]}
    agg_map = {
        "position": ["X", "Y", "X^2", "Y^2"],
        "ego-centric": ["A", "D", "A^2", "D^2"]
    }
    for entity in entities:
        if entity == 'BAT_0':
            continue
        for agg_f in agg_map:
            ret_d[f"{entity}_EF_{agg_f}"] = 0
            for f in agg_map[agg_f]:
                prefix, name = entity.split('_')
                # pop(): remove the raw feature so it isn't double-counted.
                ret_d[f"{entity}_EF_{agg_f}"] += feature_importance_dict.pop(
                    dataset.get_col_name(name, f, prefix))
    if 'BAT_0' in entities:
        # BAT_0 (the implanted bat) has no ego-centric features; it gets
        # head-direction and nearest-distance groups instead.
        agg_map.pop("ego-centric")
        agg_map['head-direction'] = ["HD", "HD^2"]  # , "HD^0.5"
        agg_map["near-D"] = ["nD", "nD^2"]
        for agg_f in agg_map:
            ret_d[f"BAT_0_EF_{agg_f}"] = 0
            for f in agg_map[agg_f]:
                ret_d[f"BAT_0_EF_{agg_f}"] += feature_importance_dict.pop(
                    dataset.get_col_name(0, f))  # also removes item
    paired_agg_features = ["position", "ego-centric"]
    for entity in entities:
        # prefix, name = entity.split('_')
        # NOTE(review): `entity <= entity` is always true, so this loop body
        # never runs — the pair-aggregation below was never finished.
        if entity <= entity:
            continue
        for paired_agg_feature in paired_agg_features:
            pass
            # new_feature_name = f"{entity}_EF_{j}{paired_agg_feature}"
            # ret_d[new_feature_name] = (ret_d[f"BAT_{i}_EF_{paired_agg_feature}"]
            #                            + ret_d[f"BAT_{j}_EF_{paired_agg_feature}"])
            # / 2
    # merge 2 dictions
    merged = {**ret_d, **feature_importance_dict}
    # print(merged)
    return list(merged.values()), list(merged.keys())
def ellipse_place_cell(df, Xcenter, Ycenter, a, b, bat_name=0):
    """Binary place cell that fires whenever the bat lies inside the ellipse
    centered at (Xcenter, Ycenter) with semi-axes a (X) and b (Y).

    bat_name may be a list: the cell then fires for any listed bat
    (bitwise OR of the individual spike trains).
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= ellipse_place_cell(df, Xcenter, Ycenter, a, b, single_bat)
        return combined
    name = str(bat_name)
    x = df[dataset.get_col_name(name, "X")]
    y = df[dataset.get_col_name(name, "Y")]
    # Standard ellipse equation: inside when the normalized sum is <= 1.
    inside = ((x - Xcenter)**2 / a**2 + (y - Ycenter)**2 / b**2) <= 1
    return inside.astype('int')
def ego_trajectory_spike_plot(df, neuron, model_spikes, bat_name, ax=None, net="net1"):
    """Scatter the other bat's ego-centric trajectory (grey) with recorded
    spikes (red) and model spikes (blue, translucent) overlaid.

    Ego-centric (relX, relY) is derived from distance D and angle A
    (degrees) relative to the implanted bat.
    """
    # assert str(bat_name) in dataset.get_other_bats_names(), "Err: Ego-centric plot has to be related to other bat"
    if ax is None:
        fig, ax = plt.subplots()
    width, height = get_net_dims(net)
    ego_max_distance = get_max_ego_distance(width, height)
    ax.set_xticks(np.linspace(-ego_max_distance, ego_max_distance, 3))
    ax.set_yticks(np.linspace(-ego_max_distance, ego_max_distance, 3))
    ax.set_ylim(-ego_max_distance, ego_max_distance)
    ax.set_xlim(-ego_max_distance, ego_max_distance)
    ax.set_xlabel(f"Bat {bat_name}# -ego X")
    ax.set_ylabel(f"Bat {bat_name}# -ego Y")
    ax.set_aspect('equal', 'box')
    # df_neuron = df[neuron.astype('bool')]
    bat_D = df[dataset.get_col_name(bat_name, "D")].copy()
    bat_A = df[dataset.get_col_name(bat_name, "A")].copy()
    if len(str(bat_name)) == 2:
        # Two-character name = PAIR feature; stored values are products, so
        # un-square to get back to spatial units — TODO confirm convention.
        # prefix = "PAIR"
        bat_D **= 0.5
        bat_A **= 0.5
    # Polar (D, A in degrees) -> Cartesian ego-centric coordinates.
    relX = bat_D * np.cos(bat_A * np.pi / 180)
    relY = bat_D * np.sin(bat_A * np.pi / 180)
    relX_spikes = relX[neuron.astype('bool')]
    relY_spikes = relY[neuron.astype('bool')]
    model_spikes_relX = relX[model_spikes.astype('bool')]
    model_spikes_relY = relY[model_spikes.astype('bool')]
    ax.scatter(relX.values, relY.values, marker='.', color='#cccccc')
    ax.scatter(relX_spikes.values, relY_spikes.values, marker='.', color='r')
    ax.scatter(model_spikes_relX.values,
               model_spikes_relY.values,
               marker='.',
               color='b',
               alpha=MODEL_SPIKE_ALPHA)
    ax.set_title(f"#spikes: {neuron.sum()}")
    return ax
def bin_func(grouped_df):
    """Aggregate one time-bin group: mean X/Y per bat plus circular-averaged
    head direction for bat 0."""
    xy_cols = [
        dataset.get_col_name(b, f)
        for b in dataset.get_bats_names()
        for f in ['X', 'Y']
    ]
    means = grouped_df[xy_cols].mean()
    # Head direction is an angle, so it needs a circular average rather
    # than the arithmetic mean used for positions.
    hd_col = dataset.get_col_name("0", "HD")
    return pd.concat([means, grouped_df[[hd_col]].apply(angle_avg)])
def place_cell(df, Xcenter, Ycenter, radius, bat_name=0):
    """Binary place cell: fires whenever the bat is within `radius` of
    (Xcenter, Ycenter).

    bat_name may be a list, in which case the spike trains of the listed
    bats are OR-ed together.
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= place_cell(df, Xcenter, Ycenter, radius, single_bat)
        return combined
    name = str(bat_name)
    dx = df[dataset.get_col_name(name, "X")] - Xcenter
    dy = df[dataset.get_col_name(name, "Y")] - Ycenter
    # Compare squared distances to avoid an unnecessary sqrt.
    return ((dx**2 + dy**2) <= radius**2).astype('int')
def rectangle_place_cell(df, left_most, top_most, width, height, bat_name=0):
    """Binary place cell firing inside an axis-aligned rectangle.

    The rectangle spans [left_most, left_most + width] in X and
    [top_most, top_most + height] in Y (bounds inclusive).
    bat_name may be a list, in which case the listed bats' spike trains
    are OR-ed together.
    """
    if isinstance(bat_name, list):
        n = 0
        for i in bat_name:
            # BUG FIX: the list branch previously recursed into
            # ellipse_place_cell (copy-paste error), producing elliptical
            # fields instead of rectangular ones for multi-bat cells.
            n |= rectangle_place_cell(df, left_most, top_most, width, height, i)
        return n
    bat_name = str(bat_name)
    bat_X = df[dataset.get_col_name(bat_name, "X")]
    bat_Y = df[dataset.get_col_name(bat_name, "Y")]
    n = ((bat_X >= left_most) & (bat_X <= left_most + width) &
         (bat_Y >= top_most) & (bat_Y <= top_most + height))
    return n.astype('int')
def add_squared_features(df, other_bats):
    """Add squared copies of the basic features: X/Y/HD for bat 0 and
    X/Y/A/D for every other bat. Returns the mutated df."""
    for feature in ["X", "Y", "HD"]:
        source_col = dataset.get_col_name(0, feature)
        df[dataset.get_col_name(0, f"{feature}^2")] = df[source_col]**2
    for bat in other_bats:
        for feature in ["X", "Y", "A", "D"]:
            source_col = dataset.get_col_name(bat, feature)
            df[dataset.get_col_name(bat, feature + "^2")] = df[source_col]**2
    return df
def add_pairwise_distance(df, other_bats):
    """Add the Euclidean distance between every unordered pair of other bats
    as a PAIR 'Dp' column. Returns the mutated df."""
    for b1 in other_bats:
        for b2 in other_bats:
            # Visit each unordered pair exactly once; bat "0" is excluded.
            if b1 <= b2 or b1 == "0" or b2 == "0":
                continue
            pair_name = b1 + b2
            dx = df[dataset.get_col_name(b2, "X")] - df[dataset.get_col_name(b1, "X")]
            dy = df[dataset.get_col_name(b2, "Y")] - df[dataset.get_col_name(b1, "Y")]
            df[dataset.get_col_name(pair_name, "Dp", "PAIR")] = np.sqrt(dx**2 + dy**2)
            # df[dataset.get_col_name(pair_name, "Dp^2", "PAIR")] = d ** 2
    return df
def gaussian_place_cell(df, Xcenter, Ycenter, sigma, bat_name=0):
    """Stochastic place cell with an isotropic Gaussian firing field centered
    at (Xcenter, Ycenter); a spike is drawn per frame against uniform noise.

    bat_name may be a list, in which case the listed bats' spike trains are
    OR-ed together.
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= gaussian_place_cell(df, Xcenter, Ycenter, sigma, single_bat)
        return combined
    name = str(bat_name)
    x = df[dataset.get_col_name(name, "X")]
    y = df[dataset.get_col_name(name, "Y")]
    # Halve sigma (same convention as gaussian_ellipse_place_cell).
    half_sigma = sigma / 2
    dist_sq = (x - Xcenter)**2 + (y - Ycenter)**2
    spike_prob = np.exp(-dist_sq / (2 * half_sigma**2))
    return (spike_prob > np.random.random(len(spike_prob))).astype('int')
def linear_ramping_distance_cell(df, bat_name):
    """Stochastic distance cell whose firing probability ramps linearly with
    the distance to `bat_name`, saturating at 0.5.

    Distances outside (min_distance, max_distance) are zeroed before the
    ramp, so they yield a negative probability and never spike.
    Returns a per-frame binary spike train.
    """
    # BUG FIX: work on a copy — the in-place `*=` previously mutated the
    # distance column of the caller's DataFrame as a side effect.
    distance = df[dataset.get_col_name(bat_name, "D")].copy()
    min_distance = 2 * 10
    max_distance = 7000
    # Zero out distances outside the valid window.
    distance *= distance > min_distance
    distance *= distance < max_distance
    # Probability 0 at min_distance, +0.01 per distance unit, capped at 0.5.
    # (The former hard-coded 20 was min_distance; named it explicitly.)
    linear_vals = (distance - min_distance) / 10 * 0.1
    linear_vals[linear_vals > 0.5] = 0.5
    # NOTE(review): removed a leftover debug call `linear_vals.hist()` that
    # drew a histogram as a side effect of spike generation.
    n = (linear_vals > np.random.random(len(linear_vals))).astype('int')
    return n
def manual_normalization(df):
    """Min-max scale every column of df to [0, 1], preserving the original
    index and column names.

    NOTE(review): the original body contained a large unreachable block after
    the early return (a manual per-feature normalization path) that
    referenced an undefined name `df_` and would have raised NameError if it
    ever ran; that dead code has been removed.
    """
    return pd.DataFrame(data=minmax_scale(df),
                        columns=df.columns,
                        index=df.index)
def add_pairwise_rotational_features(df, other_bats):
    """For every unordered pair of other bats, add sqrt cross-product
    features mixing the two bats' coordinates: XY, YX (positional) and
    DA, AD (ego-centric). Returns the mutated df."""
    pair_bats_name = []
    for b1 in other_bats:
        for b2 in other_bats:
            # Each unordered pair exactly once.
            if b1 <= b2:
                continue
            pair_name = b1 + b2
            pair_bats_name.append(pair_name)
            # Same cross-feature construction for the positional (X, Y) and
            # the ego-centric (D, A) feature groups.
            for group in (["X", "Y"], ["D", "A"]):
                for f1 in group:
                    for f2 in group:
                        if f1 == f2:
                            continue
                        cross = (df[dataset.get_col_name(b1, f1)] *
                                 df[dataset.get_col_name(b2, f2)])
                        df[dataset.get_col_name(pair_name, f1 + f2,
                                                "PAIR")] = np.sqrt(cross)
    return df
def trajectory_spike_plot(df, neuron, model_spikes, ax=None, bat_name=0, net="net1"):
    """Scatter the bat's allocentric trajectory (grey) with recorded spikes
    (red) and model spikes (blue, translucent) overlaid.

    bat_name: a single bat id, or a two-character pair id (PAIR features),
    in which case the stored values are un-squared via **0.5 before plotting.
    """
    if ax is None:
        fig, ax = plt.subplots()
    width, height = get_net_dims(net)
    ax.set_xticks(np.arange(0, width + 1, 50))
    ax.set_yticks(np.arange(0, height + 1, 50))
    ax.set_xlim(0, width)
    # Inverted y-axis so the plot matches image/frame coordinates.
    ax.set_ylim(height, 0)
    prefix = "BAT"
    if len(str(bat_name)) == 2:
        prefix = "PAIR"
    ax.set_xlabel(f"{prefix} {bat_name}# X")
    ax.set_ylabel(f"{prefix} {bat_name}# Y")
    ax.set_aspect('equal', 'box')
    df_neuron = df[neuron.astype('bool')]
    bat_X = df[dataset.get_col_name(bat_name, "X")].copy()
    bat_Y = df[dataset.get_col_name(bat_name, "Y")].copy()
    spikes_X = df_neuron[dataset.get_col_name(bat_name, "X")].copy()
    spikes_Y = df_neuron[dataset.get_col_name(bat_name, "Y")].copy()
    df_model_neuron = df[model_spikes.astype('bool')]
    model_spikes_X = df_model_neuron[dataset.get_col_name(bat_name, "X")].copy()
    model_spikes_Y = df_model_neuron[dataset.get_col_name(bat_name, "Y")].copy()
    if prefix == "PAIR":
        # PAIR columns hold products of two bats' coordinates; take the
        # square root to get back to spatial units — TODO confirm convention.
        bat_X **= 0.5
        bat_Y **= 0.5
        spikes_X **= 0.5
        spikes_Y **= 0.5
        model_spikes_X **= 0.5
        model_spikes_Y **= 0.5
    # return ax
    ax.scatter(bat_X.values, bat_Y.values, marker='.', color='#cccccc')
    ax.scatter(spikes_X.values, spikes_Y.values, marker='.', color='r')
    ax.scatter(model_spikes_X.values,
               model_spikes_Y.values,
               marker='.',
               color='b',
               alpha=MODEL_SPIKE_ALPHA)
    ax.set_title(f"#spikes: {neuron.sum()}")
    return ax
def distance_cell(df, distance, bat_name=0, distance_width=3):
    """Binary distance cell: fires when the bat's distance D is within
    distance_width of `distance`.

    bat_name may be a list, in which case the listed bats' spike trains
    are OR-ed together. Only defined for other bats (not bat 0).
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= distance_cell(df, distance, single_bat, distance_width)
        return combined
    name = str(bat_name)
    assert name in dataset.get_other_bats_names(
    ), "Err: Distance is only defined on other bats"
    d = df[dataset.get_col_name(name, "D")]
    return (abs(d - distance) < distance_width).astype('int')
def angle_cell(df, angle, bat_name=0, angle_range=3):
    """Binary ego-centric angle cell: fires when angle A is within
    angle_range degrees of `angle`, handling 0/360 wrap-around.

    bat_name may be a list, in which case the listed bats' spike trains
    are OR-ed together. Only defined for other bats (not bat 0).
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= angle_cell(df, angle, single_bat, angle_range)
        return combined
    name = str(bat_name)
    assert name in dataset.get_other_bats_names(
    ), "Err: Angle is only defined on other bats!"
    a = df[dataset.get_col_name(name, "A")]
    # n = (abs((bat_A - angle)%360) < angle_range) | (abs(bat_A - angle) < angle_range)
    # Check proximity in both rotational directions via modular arithmetic.
    near_clockwise = (a - angle) % 360 < angle_range
    near_counterclockwise = (angle - a) % 360 < angle_range
    return (near_clockwise | near_counterclockwise).astype('int')
def gaussian_angle_cell(df, angle, bat_name=0, angle_range=3):
    """Stochastic ego-centric angle cell with a von Mises tuning curve
    centered on `angle` (degrees); a spike is drawn per frame against
    uniform random noise.

    angle_range (degrees) sets the tuning width: concentration
    kappa = 1 / radians(angle_range). bat_name may be a list, in which case
    the listed bats' spike trains are OR-ed together. Only defined for other
    bats (not bat 0).
    """
    if isinstance(bat_name, list):
        n = 0
        for i in bat_name:
            n |= gaussian_angle_cell(df, angle, i, angle_range)
        return n
    bat_name = str(bat_name)
    assert bat_name in dataset.get_other_bats_names(
    ), "Err: Angle is only defined on other bats!"
    bat_A = df[dataset.get_col_name(bat_name, "A")]
    # BUG FIX: `angle_range` was previously overwritten with the hard-coded
    # value 1 / np.radians(3), silently ignoring the caller's argument; the
    # concentration now follows the parameter, consistent with
    # gaussian_head_direction_cell.
    kappa = 1 / np.radians(angle_range)
    gaussian_vals = von_mises(np.radians(angle), kappa, np.radians(bat_A))
    n = (gaussian_vals > np.random.random(len(gaussian_vals))).astype('int')
    return n
def gaussian_distance_cell(df, distance, bat_name=0, distance_width=3):
    """Stochastic distance cell with Gaussian tuning around `distance`;
    a spike is drawn per frame against uniform random noise.

    bat_name may be a list, in which case the listed bats' spike trains
    are OR-ed together. Only defined for other bats (not bat 0).
    """
    if isinstance(bat_name, list):
        combined = 0
        for single_bat in bat_name:
            combined |= gaussian_distance_cell(df, distance, single_bat,
                                               distance_width)
        return combined
    name = str(bat_name)
    assert name in dataset.get_other_bats_names(
    ), "Err: Distance is only defined on other bats"
    # Halve the width (same convention as the other gaussian cells).
    sigma = distance_width / 2
    d = df[dataset.get_col_name(name, "D")]
    spike_prob = np.exp(-((d - distance)**2 / (2 * sigma**2)))
    return (spike_prob > np.random.random(len(spike_prob))).astype('int')
def bin_dataset(df, bin_size):
    """Down-sample the dataset by grouping every `bin_size` consecutive frames
    into one row (mean X/Y per bat, circular-average HD via bin_func), then
    rebuild the dataset and re-add the squared features.

    Returns the binned, feature-engineered dataframe.
    """
    df2 = df.copy()
    # Integer-divide the frame index to assign each row to a bin.
    df2['group'] = df2.index.to_series() // bin_size
    t = time.time()
    df3 = df2.groupby('group').apply(bin_func)
    print("took:", time.time() - t, "sec")
    bats = []
    # Split the aggregated frame back into one per-bat dataframe with bare
    # feature names (the "_F_" suffix part), tagged with its bat_id, so
    # dataset.build_dataset_inline can reassemble it.
    for b in dataset.get_bats_names():
        cols = df3.columns.to_series().str.startswith(f"BAT_{b}")
        new_cols_names = (cols[cols].index.str.extract('_F_(.*)'))
        bat_i_df = df3[cols[cols].index.to_series()]
        new_cols_names = new_cols_names[0].values
        map_dict = dict(zip(cols[cols].index, new_cols_names))
        bat_i_df = bat_i_df.rename(columns=map_dict)
        bat_i_df['bat_id'] = b
        bats.append(bat_i_df)
    # imp.reload(dataset)
    grouped_df = dataset.build_dataset_inline(bats)
    # feature engineering: recompute squared features, which cannot be
    # carried through the binning (mean of squares != square of mean).
    e_df = grouped_df.copy()
    e_df[dataset.get_col_name(0, "X^2")] = e_df[dataset.get_col_name(0, "X")]**2
    e_df[dataset.get_col_name(0, "Y^2")] = e_df[dataset.get_col_name(0, "Y")]**2
    e_df[dataset.get_col_name(0, "HD^2")] = e_df[dataset.get_col_name(0, "HD")]**2
    for bat_name in dataset.get_other_bats_names():
        for f_name in ["X", "Y", "A", "D"]:
            e_df[dataset.get_col_name(
                bat_name,
                f_name + "^2")] = e_df[dataset.get_col_name(bat_name, f_name)]**2
    return e_df
def add_squared_features_old(df, other_bats, pair_bats_names=None):
    """Add squared copies of the basic features: X/Y/HD for bat 0, X/Y/A/D
    for every other bat, and X/Y/A/D/D* for every PAIR entity.

    Returns the mutated df. Superseded by add_squared_features (which does
    not handle PAIR features).
    """
    # FIX: replaced the mutable default argument `pair_bats_names=[]` with
    # the None sentinel (the shared-list pitfall); callers are unaffected.
    if pair_bats_names is None:
        pair_bats_names = []
    for f in ["X", "Y", "HD"]:
        df[dataset.get_col_name(0, f"{f}^2")] = df[dataset.get_col_name(0, f)]**2
        # df[dataset.get_col_name(0, f"{f}^0.5")] = df[dataset.get_col_name(0, f)] ** 0.5
    for bat_name in other_bats:
        for f_name in ["X", "Y", "A", "D"]:
            df[dataset.get_col_name(bat_name,
                                    f_name + "^2")] = df[dataset.get_col_name(
                                        bat_name, f_name)]**2
            # df[dataset.get_col_name(bat_name, f_name + "^0.5")] = df[dataset.get_col_name(bat_name, f_name)] ** 0.5
    for bat_name in pair_bats_names:
        for f_name in ["X", "Y", "A", "D", "D*"]:
            df[dataset.get_col_name(bat_name, f_name + "^2",
                                    "PAIR")] = df[dataset.get_col_name(
                                        bat_name, f_name, "PAIR")]**2
            # df[dataset.get_col_name(bat_name, f_name + "^0.5", "PAIR")] =
            # df[dataset.get_col_name(bat_name, f_name, "PAIR")] ** 0.5
    return df
def ego_rate_map_plot(df, neuron, bat_name=0, ax=None, net="net1"):
    """Draw a smoothed 2-D ego-centric firing-rate map: spike counts divided
    by occupancy in (relX, relY) bins, Gaussian-smoothed, low-occupancy bins
    masked as NaN. Returns the matplotlib image object.
    """
    # assert str(bat_name) in dataset.get_other_bats_names(), "Err: Ego-centric plot has to be related to other bat"
    # Occupancy can be zero, so silence the resulting divide warnings.
    np.seterr(divide='ignore', invalid='ignore')
    if ax is None:
        fig, ax = plt.subplots()
    BIN_SIZE = 5
    width, height = get_net_dims(net)
    max_ego_distance = get_max_ego_distance(width, height)
    max_ego_distance2 = int(max_ego_distance / 0.6)  # unscaling
    # Locate where the +/- max_ego_distance marks fall on the wider grid to
    # place three tick labels (-max, 0, +max).
    max_linspace = np.linspace(-max_ego_distance2, max_ego_distance2,
                               2 * max_ego_distance2 // BIN_SIZE)
    negative_idx = np.argmin(np.abs(max_linspace + max_ego_distance))
    positive_idx = np.argmin(np.abs(max_linspace - max_ego_distance))
    ax.set_xticks(np.linspace(0, positive_idx - negative_idx, 3))
    ax.set_yticks(np.linspace(0, positive_idx - negative_idx, 3))
    ax.set_xticklabels([-max_ego_distance, 0, max_ego_distance])
    ax.set_yticklabels([-max_ego_distance, 0, max_ego_distance])
    bat_name = str(bat_name)
    bat_D = df[dataset.get_col_name(bat_name, "D")].copy()
    bat_A = df[dataset.get_col_name(bat_name, "A")].copy()
    if len(str(bat_name)) == 2:
        # Two-character name = PAIR feature; un-square the stored products
        # to get back to spatial units — TODO confirm convention.
        bat_D **= 0.5
        bat_A **= 0.5
    # Polar (D, A in degrees) -> Cartesian ego-centric coordinates.
    relX = bat_D * np.cos(bat_A * np.pi / 180)
    relY = bat_D * np.sin(bat_A * np.pi / 180)
    # Occupancy map; bins visited fewer than TIME_SPENT_THRESHOLD frames
    # are zeroed (and later masked).
    time_spent = np.histogram2d(relX,
                                relY,
                                2 * max_ego_distance // BIN_SIZE,
                                range=[[-max_ego_distance, max_ego_distance],
                                       [-max_ego_distance,
                                        max_ego_distance]])[0]
    time_spent = time_spent * (time_spent >= TIME_SPENT_THRESHOLD)
    # Spike map: same binning, weighted by the spike train.
    spikes = np.histogram2d(relX,
                            relY,
                            2 * max_ego_distance // BIN_SIZE,
                            weights=neuron,
                            range=[[-max_ego_distance, max_ego_distance],
                                   [-max_ego_distance, max_ego_distance]])[0]
    spikes2 = spikes * (time_spent >= TIME_SPENT_THRESHOLD)
    # result = spikes2 / time_spent
    gauss_filter = fspecial_gauss(GAUSSIAN_FILTER_SIZE, GAUSSIAN_FILTER_SIGMA)
    # smooth_spikes = scipy.signal.convolve2d(gauss_filter, spikes2)
    # smooth_time_spent = scipy.signal.convolve2d(gauss_filter, time_spent)
    # Smooth numerator and denominator separately, then divide.
    smooth_spikes = scipy.ndimage.correlate(spikes2,
                                            gauss_filter,
                                            mode='constant')
    smooth_time_spent = scipy.ndimage.correlate(time_spent,
                                                gauss_filter,
                                                mode='constant')
    smoothed_result = smooth_spikes / smooth_time_spent
    # Mask under-sampled bins so they render as blank rather than noisy.
    smoothed_result[time_spent < TIME_SPENT_THRESHOLD] = np.nan
    ax.set_title(
        f"Max firing rate: {FRAME_RATE * np.nanmax(smoothed_result):.2}")
    img = ax.imshow(smoothed_result.T, origin="lower", cmap="jet")
    img.set_clim(0, np.nanmax(smoothed_result))
    return img
def rate_map_plot(df, neuron, bat_name=0, ax=None, net="net1"):
    """Draw a smoothed 2-D allocentric firing-rate map: spike counts divided
    by occupancy in X/Y bins, Gaussian-smoothed, low-occupancy bins masked
    as NaN. Returns the matplotlib image object.
    """
    BIN_SIZE = 3
    width, height = get_net_dims(net)
    # Occupancy can be zero, so silence the resulting divide warnings.
    np.seterr(divide='ignore', invalid='ignore')
    if ax is None:
        fig, ax = plt.subplots()
    # Ticks are in bin units; labels convert back to spatial units
    # (bin index * BIN_SIZE + half a bin).
    x_plot_range = np.linspace(0, width // BIN_SIZE - BIN_SIZE / 2 + 1,
                               3).round(1)
    y_plot_range = np.linspace(0, height // BIN_SIZE - BIN_SIZE / 2 + 1,
                               3).round(1)
    ax.set_xticks(x_plot_range)
    ax.set_yticks(y_plot_range)
    ax.set_xticklabels((x_plot_range * BIN_SIZE + BIN_SIZE / 2).round(1))
    ax.set_yticklabels((y_plot_range * BIN_SIZE + BIN_SIZE / 2).round(1))
    prefix = "BAT"
    if len(str(bat_name)) == 2:
        prefix = "PAIR"
    ax.set_xlabel(f"{prefix} {bat_name}# X")
    ax.set_ylabel(f"{prefix} {bat_name}# Y")
    ax.set_aspect('equal', 'box')
    bat_name = str(bat_name)
    bat_X = df[dataset.get_col_name(bat_name, "X")].copy()
    bat_Y = df[dataset.get_col_name(bat_name, "Y")].copy()
    if prefix == "PAIR":
        # PAIR columns hold products; un-square to get spatial units —
        # TODO confirm convention.
        bat_X **= 0.5
        bat_Y **= 0.5
    # Occupancy map; bins visited fewer than TIME_SPENT_THRESHOLD frames
    # are zeroed (and later masked).
    time_spent = np.histogram2d(bat_X,
                                bat_Y, [width // BIN_SIZE, height // BIN_SIZE],
                                range=[(0, width), (0, height)])[0]
    time_spent = time_spent * (time_spent >= TIME_SPENT_THRESHOLD)
    # Spike map: same binning, weighted by the spike train.
    spikes = np.histogram2d(bat_X,
                            bat_Y, [width // BIN_SIZE, height // BIN_SIZE],
                            weights=neuron,
                            range=[(0, width), (0, height)])[0]
    spikes2 = spikes * (time_spent >= TIME_SPENT_THRESHOLD)
    gauss_filter = fspecial_gauss(
        GAUSSIAN_FILTER_SIZE, GAUSSIAN_FILTER_SIGMA)  # divides by 3, multiply by 4
    # Smooth numerator and denominator separately, then divide.
    smooth_spikes = scipy.ndimage.correlate(spikes2,
                                            gauss_filter,
                                            mode='constant')
    smooth_time_spent = scipy.ndimage.correlate(time_spent,
                                                gauss_filter,
                                                mode='constant')
    # result = spikes2 / time_spent
    smoothed_result = smooth_spikes / smooth_time_spent
    # Mask under-sampled bins so they render as blank rather than noisy.
    smoothed_result[time_spent < TIME_SPENT_THRESHOLD] = np.nan
    ax.set_title(
        f"Max firing rate: {FRAME_RATE * np.nanmax(smoothed_result):.2}"
    )  # 25Hz
    img = ax.imshow(smoothed_result.T, cmap='jet')
    img.set_clim(0, np.nanmax(smoothed_result))
    return img
def head_direction_cell(df, angle, angle_range=3):
    """Binary head-direction cell: fires when bat 0's head direction is
    within angle_range degrees of `angle`, handling 0/360 wrap-around."""
    hd = df[dataset.get_col_name(0, "HD")]
    # Check proximity in both rotational directions via modular arithmetic.
    near_clockwise = (hd - angle) % 360 < angle_range
    near_counterclockwise = (angle - hd) % 360 < angle_range
    return (near_clockwise | near_counterclockwise).astype('int')
def gaussian_head_direction_cell(df, angle, angle_range=3):
    """Stochastic head-direction cell with a von Mises tuning curve centered
    on `angle` (degrees); a spike is drawn per frame against uniform noise.

    angle_range sets the tuning width: concentration = 1/radians(angle_range).
    """
    hd = df[dataset.get_col_name(0, "HD")]
    kappa = 1 / np.radians(angle_range)  # narrower range -> sharper tuning
    spike_prob = von_mises(np.radians(angle), kappa, np.radians(hd))
    return (spike_prob > np.random.random(len(spike_prob))).astype('int')