# Compute pairwise distance matrices between experiment bouts and exemplar bouts
# from a previously analysed "mapping" experiment.
# NOTE(review): this chunk has no import block and references names defined
# elsewhere (mapping_experiment, experiment, n_dims, np, pd, BoutData,
# create_folder, create_filepath, import_bouts, print_heading, Timer) — it is
# a fragment of a longer script. The chunk is also truncated mid-loop.

# Import data from mapping experiment
mapping_space_directory = os.path.join(mapping_experiment.subdirs['analysis'], 'behaviour_space')
eigenfish = np.load(os.path.join(mapping_space_directory, 'eigenfish.npy'))
# tail_statistics.npy holds two rows: the mean and std used to whiten tail angles
mean_tail, std_tail = np.load(os.path.join(mapping_space_directory, 'tail_statistics.npy'))
exemplar_info = pd.read_csv(os.path.join(mapping_experiment.subdirs['analysis'], 'exemplars.csv'),
                            index_col='bout_index',
                            dtype={'ID': str, 'video_code': str})
# Keep only exemplars flagged as clean
exemplar_info = exemplar_info[exemplar_info['clean']]
exemplars = BoutData.from_directory(exemplar_info, mapping_experiment.subdirs['kinematics'],
                                    check_tail_lengths=False, tail_columns_only=True)
# Project exemplar tail kinematics onto the eigenfish (whitened PCA space)
exemplars = exemplars.map(eigenfish, whiten=True, mean=mean_tail, std=std_tail)
exemplars = exemplars.list_bouts(values=True, ndims=n_dims)

# Set paths
output_directory = create_folder(experiment.subdirs['analysis'], 'distance_matrices')

# Import experiment bouts and project them into the same space as the exemplars
experiment_bouts = import_bouts(experiment.directory)
experiment_bouts = experiment_bouts.map(eigenfish, whiten=True, mean=mean_tail, std=std_tail)

# Compute distance matrices (one per fish, cached on disk as .npy)
print_heading('CALCULATING DISTANCE MATRICES')
distances = {}
analysis_times = []
timer = Timer()
timer.start()
for ID in experiment_bouts.metadata['ID'].unique():
    output_path, path_exists = create_filepath(output_directory, ID, '.npy', True)
    if path_exists:
        # Re-use a previously computed matrix rather than recomputing
        distances[ID] = np.load(output_path)
        # NOTE(review): source chunk truncated here — the else branch (actual
        # distance computation) continues beyond this chunk.
# Calculate stimulus maps for each fish of two experiments (blumenkohl and
# lakritz datasets) using BoutStimulusMapper.
from datasets.blumenkohl import experiment as blu
from datasets.lakritz import experiment as lak
from behaviour_analysis.manage_files import create_folder
from behaviour_analysis.miscellaneous import print_heading, Timer
from behaviour_analysis.analysis.stimulus_mapping import BoutStimulusMapper
import os
import pandas as pd


if __name__ == "__main__":

    for experiment in (blu, lak):
        print_heading(os.path.basename(experiment.directory))
        stimulus_map_directory = create_folder(experiment.subdirs['analysis'], 'stimulus_maps')
        mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'),
                                   index_col=0,
                                   dtype={'ID': str, 'video_code': str})
        # Calculate stimulus maps for each fish in parallel
        timer = Timer()
        timer.start()
        mapper = BoutStimulusMapper(mapped_bouts, experiment, stimulus_map_directory, n_threads=20)
        # NOTE(review): source chunk truncated here — presumably the mapper is
        # run and the timer stopped beyond this chunk; confirm against original.
mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'), index_col=0, dtype={ 'ID': str, 'video_code': str }) # Re-weighting in isomap space isomap = np.load( os.path.join(experiment.parent.subdirs['analysis'], 'isomap.npy'))[:, :3] W = generate_weights(isomap) # Create paths for saving transition_directory = create_folder(experiment.subdirs['analysis'], 'transitions') for condition, fish_info in experiment.data.groupby('condition'): print condition condition_directory = create_folder(transition_directory, condition) condition_bouts = mapped_bouts[mapped_bouts['ID'].isin( fish_info['ID'])] # Compute the transition matrix for each fish T = fish_transition_matrices(condition_bouts, state_col='exemplar', n_states=len(isomap), shuffle=False) np.save(os.path.join(condition_directory, 'transition_matrices.npy'), T) print T.shape # Redistribute transitions W = generate_weights(isomap, bandwidth=40.)
# Eye-convergence analysis for the lensectomy dataset: compute per-fish
# convergence scores and (beyond this chunk) per-condition distributions.
from datasets.lensectomy import experiment
from behaviour_analysis.manage_files import create_folder
from behaviour_analysis.analysis import EyeTrackingData
import os
import numpy as np


if __name__ == "__main__":

    # Paths for saving
    output_directory = create_folder(experiment.subdirs['analysis'], 'eye_convergence')
    convergence_scores_path = os.path.join(output_directory, 'convergence_scores.csv')
    plots_directory = create_folder(output_directory, 'plots')
    # Template filled with a condition name when saving each distribution
    distribution_filename = '{}_convergence_distribution.npy'

    # Import data
    eye_tracking = EyeTrackingData.from_experiment(experiment)

    # Calculate thresholds and scores (convergence threshold searched in 30-65 degrees)
    convergence_scores = eye_tracking.calculate_convergence_scores(
        save_plots_to=plots_directory, threshold_limits=(30, 65))
    # Save
    convergence_scores.to_csv(convergence_scores_path, index=False)

    # Combine right and left into unilateral condition
    unilateral_idxs = eye_tracking.metadata[
        eye_tracking.metadata['condition'].isin(('right', 'left'))]
    eye_tracking.metadata.loc[unilateral_idxs.index, 'condition'] = 'unilateral'

    # Calculate convergence distribution for each condition
    # NOTE(review): source chunk truncated mid-statement here:
    # for condition, condition_info in eye_tracking.metadata.groupby(...)
from datasets.main_dataset import experiment from paths import paths, capture_strike_directory from behaviour_analysis.analysis.stimulus_mapping import calculate_fish_sequences from behaviour_analysis.manage_files import create_folder from behaviour_analysis.miscellaneous import Timer from joblib import Parallel, delayed import numpy as np import pandas as pd import os strike_sequence_directory = create_folder(capture_strike_directory, 'strike_sequences') if __name__ == "__main__": capture_strikes = pd.read_csv(paths['capture_strikes'], index_col=0, dtype={'ID': str, 'video_code': str}) complete_strikes = capture_strikes[(capture_strikes['start'] - 500) >= 0] complete_strikes.to_csv(os.path.join(strike_sequence_directory, 'complete_strikes.csv'), index=True) analysis_times = Parallel(4)(delayed(calculate_fish_sequences)(ID, complete_strikes, experiment, strike_sequence_directory) for ID in complete_strikes['ID'].unique()) print 'Total time:', Timer.convert_time(np.sum(analysis_times)) print 'Average time:', Timer.convert_time(np.mean(analysis_times))
# Central registry of file paths used by the behaviour-space analysis of the
# main dataset. Importing this module creates the behaviour_space directory.
from datasets.main_dataset import experiment
from behaviour_analysis.manage_files import create_folder
import os

behaviour_space_directory = create_folder(experiment.subdirs['analysis'], 'behaviour_space')

# Maps a short name -> absolute file path
paths = {}

# Files saved in the experiment directory
paths['bouts'] = os.path.join(experiment.directory, 'bouts.csv')

# Files saved in analysis directory
paths['isomap'] = os.path.join(experiment.subdirs['analysis'], 'isomap.npy')
for fname in ('exemplars', 'mapped_bouts'):
    paths[fname] = os.path.join(experiment.subdirs['analysis'], fname + '.csv')

# Files saved in behaviour space directory (all numpy arrays)
for fname in ('bout_indices', 'eigenfish', 'tail_statistics', 'explained_variance',
              'distance_matrix_normal', 'distance_matrix_flipped', 'distance_matrix',
              'cluster_labels', 'cluster_centres', 'exemplar_distance_matrix',
              'kernel_pca_eigenvalues', 'reconstruction_errors', 'kinematic_features'):
    paths[fname] = os.path.join(behaviour_space_directory, fname + '.npy')
# Hunt-initiation analysis for the lensectomy dataset: compute the hunt
# initiation rate for each fish, grouped by experimental condition.
from datasets.lensectomy import experiment
from behaviour_analysis.manage_files import create_folder
import os
import pandas as pd
import numpy as np


if __name__ == "__main__":

    # Paths for saving
    experiment.subdirs['analysis'] = os.path.join(experiment.directory, 'analysis')
    eye_convergence_directory = os.path.join(experiment.subdirs['analysis'], 'eye_convergence')
    output_directory = create_folder(eye_convergence_directory, 'hunt_initiation')

    # Open bouts
    mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'),
                               index_col=0,
                               dtype={'ID': str, 'video_code': str})

    # Calculate the hunt initiation rate for each fish grouped by condition
    hunt_rate_by_condition = {}
    for condition, IDs in experiment.data.groupby('condition')['ID']:
        condition_bouts = mapped_bouts[mapped_bouts['ID'].isin(IDs)]
        condition_start_rate = []
        # NOTE(review): source chunk truncated at the per-fish loop header:
        # for ID, fish_bouts in condition_bouts.groupby('ID'):
        #     ...body continues beyond this chunk...
# Setup module for the 2D prey-capture dataset: opens the tracking experiment
# and registers clustering-related output paths.
from behaviour_analysis.experiments import TrackingExperiment2D
from behaviour_analysis.manage_files import create_folder
import os

# Raw videos live on a separate drive from the tracking data
video_directory = 'I:\\Duncan\\Behaviour\\prey_capture_experiments\\prey_capture\\videos'
experiment = TrackingExperiment2D('D:\\DATA\\prey_capture',
                                  video_directory=video_directory,
                                  log=False)
experiment.open()
experiment.subdirs['analysis'] = create_folder(experiment.directory, 'analysis')
clustering_directory = create_folder(experiment.subdirs['analysis'], 'clustering')

# Maps a short name -> absolute file path
paths = {}
paths['exemplars'] = os.path.join(experiment.subdirs['analysis'], 'exemplars.csv')
paths['mapped_bouts'] = os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv')
for fname in ['eigenvalues', 'weighted_isomap']:
    paths[fname] = os.path.join(clustering_directory, fname + '.npy')
# Path registry for the capture-strike analysis of the main dataset.
# Importing this module creates the capture_strikes directory.
from datasets.main_dataset import experiment
from behaviour_analysis.manage_files import create_folder
import os

capture_strike_directory = create_folder(experiment.subdirs['analysis'], 'capture_strikes')

# Maps a short name -> absolute file path
paths = {}
# CSV outputs
for fname in ('capture_strikes', 'strike_frames'):
    paths[fname] = os.path.join(capture_strike_directory, fname + '.csv')
# Numpy array outputs
for fname in ('capture_strike_distance_matrix', 'isomapped_strikes'):
    paths[fname] = os.path.join(capture_strike_directory, fname + '.npy')
# Setup module for the 3D prey-capture dataset: opens the tracking experiment
# and ensures its analysis subdirectory exists.
from behaviour_analysis.experiments import TrackingExperiment3D
from behaviour_analysis.manage_files import create_folder

experiment = TrackingExperiment3D('D:\\DATA\\3D_prey_capture')
experiment.open()
experiment.subdirs['analysis'] = create_folder(experiment.directory, 'analysis')
# For each fish, compute the proportion of hunting episodes that end in a
# capture strike (strikes per hunt-end), grouped by condition.
# NOTE(review): this chunk has no import block and references names defined
# elsewhere (pd, os, experiment, paths, create_folder,
# capture_strike_directory) — it is a fragment of a longer script.

mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'),
                           index_col=0,
                           dtype={'ID': str, 'video_code': str})
# Phase 3 bouts mark the end of a hunting episode
end_hunts = mapped_bouts[(mapped_bouts['phase'] == 3)]
capture_strikes = pd.read_csv(paths['capture_strikes'], index_col=0,
                              dtype={'ID': str, 'video_code': str})
output_directory = create_folder(capture_strike_directory, 'proportion_strikes')

fish_strike_proportions = {}
for condition, IDs in experiment.data.groupby('condition')['ID']:
    condition_proportions = []
    for idx, ID in IDs.iteritems():
        n_end_hunts = (end_hunts['ID'] == ID).sum()
        n_strikes = (capture_strikes['ID'] == ID).sum()
        if n_end_hunts > 0:
            # float() forces true division under Python 2 integer semantics
            proportion = n_strikes / float(n_end_hunts)
        else:
            # Fish that never ended a hunt contribute a proportion of zero
            proportion = 0
        condition_proportions.append(proportion)
    fish_strike_proportions[condition] = condition_proportions

control_proportions = fish_strike_proportions['control']
from datasets.main_dataset import experiment from behaviour_analysis.manage_files import create_folder from behaviour_analysis.miscellaneous import find_contiguous, print_subheading import numpy as np import pandas as pd import os modelling_directory = create_folder(experiment.subdirs['analysis'], 'modelling') if __name__ == "__main__": mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'), index_col='transition_index', dtype={'ID': str, 'video_code': str}) n_states = len(mapped_bouts['module'].unique()) # ==================== # Find all bout chains # ==================== print_subheading('Finding bout chains') chains = [] for ID, fish_bouts in mapped_bouts.groupby('ID'): print ID for video_code, video_bouts in fish_bouts.groupby('video_code'): chain = video_bouts['module'] seqs = find_contiguous(chain.index, minsize=2, stepsize=1) for seq in seqs: chains.append(chain.loc[seq].values) n_chains = len(chains) # number of unbroken bout chains
# Figure 2 panels: decompose the transition matrix WTW into symmetric and
# antisymmetric parts, order states by hierarchical clustering of singular
# vectors, and plot the sorted matrices.
# NOTE(review): this chunk has no import block and references names defined
# elsewhere (WTW, USVs, USVa, np, sch, plt, os, create_folder,
# output_directory) — it is a fragment of a longer script; sch is presumably
# scipy.cluster.hierarchy and plt matplotlib.pyplot — confirm against original.

S = 0.5 * (WTW + WTW.T)   # symmetric component
A = 0.5 * (WTW - WTW.T)   # antisymmetric component

# All transitions: cluster states on an orthonormal basis built from the
# leading symmetric and antisymmetric singular vectors
q, r = np.linalg.qr(np.concatenate([USVs[0, :, 1:3], USVa[0, :, :2]], axis=1))
Y = sch.linkage(q, method='ward')
Z1 = sch.dendrogram(Y, orientation='left', no_plot=True)
Z2 = sch.dendrogram(Y, no_plot=True)
idx1 = Z1['leaves']
idx2 = Z2['leaves']
# NOTE(review): indexing with np.meshgrid uses 'xy' indexing by default, which
# transposes rows/columns relative to np.ix_ — assumed intentional; confirm.
T_sorted = WTW[np.meshgrid(idx1, idx2)]
S_sorted = S[np.meshgrid(idx1, idx2)]
A_sorted = A[np.meshgrid(idx1, idx2)]

fig2_dir = create_folder(output_directory, 'figure2')
matrix_dir = create_folder(fig2_dir, 'matrices')

# Transition matrix
fig, ax = plt.subplots(figsize=(1.5, 1.5),
                       gridspec_kw=dict(left=0, right=1, bottom=0, top=1))
ax.matshow(T_sorted, cmap='plasma', vmin=0, vmax=0.1)
ax.set_xticks([])
ax.set_yticks([])
fig.savefig(os.path.join(matrix_dir, 'T.png'))
plt.close(fig)

# Symmetric matrix
fig, ax = plt.subplots(figsize=(1.5, 1.5),
                       gridspec_kw=dict(left=0, right=1, bottom=0, top=1))
ax.matshow(S_sorted, cmap='plasma', vmin=-0.1, vmax=0.1)
# NOTE(review): source chunk truncated here — remaining panel-saving code
# continues beyond this chunk.
# Video 3 generation: for each representative bout, load its video frames and
# tracking data (frames are later rotated/centred on the fish).
from datasets.main_dataset import experiment
from behaviour_analysis.video import Video, video_code_to_timestamp
from behaviour_analysis.miscellaneous import read_csv
from behaviour_analysis.tracking import rotate_and_centre_image
from behaviour_analysis.manage_files import create_folder
import numpy as np
import pandas as pd
import os
from ast import literal_eval
import cv2
from skimage.exposure import rescale_intensity


if __name__ == "__main__":

    # NOTE(review): output_directory is not defined in this chunk — presumably
    # imported from a paths module in the missing part of the file; confirm.
    video_output = create_folder(output_directory, 'video3')

    representative_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'clustering',
                                                    'representative_bouts.csv'),
                                       index_col='bout_index',
                                       dtype={'ID': str, 'video_code': str})

    for idx, bout_info in representative_bouts.iterrows():
        fish_info = experiment.data[experiment.data['ID'] == bout_info.ID].iloc[0]
        video_file = video_code_to_timestamp(bout_info.video_code)
        video_path = os.path.join(experiment.video_directory, fish_info.video_directory,
                                  video_file + '.avi')
        tracking_path = os.path.join(experiment.subdirs['tracking'], fish_info.ID,
                                     bout_info.video_code + '.csv')
        # 'centre' column is stored as a stringified tuple; parse it safely
        tracking = read_csv(tracking_path, centre=literal_eval)
        v = Video(video_path)
        frames = v.return_frames(bout_info.start, bout_info.end)
        # NOTE(review): source chunk truncated here — frame processing and
        # saving continue beyond this chunk.
from behaviour_analysis.manage_files import create_folder from behaviour_analysis.analysis.alignment import calculate_distance_matrix from behaviour_analysis.analysis.embedding import IsomapPrecomputed from behaviour_analysis.analysis.bouts import BoutData import os import numpy as np import pandas as pd from scipy.spatial.distance import squareform if __name__ == "__main__": ndims = 6 behaviour_space_directory = os.path.join(experiment.subdirs['analysis'], 'behaviour_space') output_directory = create_folder(behaviour_space_directory, 'six_principal_components') # Explained variance explained_variance = np.load( os.path.join(behaviour_space_directory, 'explained_variance.npy')) print '{} principal components explain:'.format(ndims), np.cumsum( explained_variance)[ndims - 1] # Import exemplar bouts exemplars = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'exemplars.csv'), index_col='bout_index', dtype=dict(ID=str, video_code=str)) exemplars = exemplars[exemplars['clean']] exemplar_bouts = BoutData.from_directory(exemplars, experiment.subdirs['kinematics'],
# Video 4 generation: build example panels for attack and s-strike capture
# strikes from the exemplar strikes table.
# NOTE(review): this chunk references names not imported here (os,
# create_folder, output_directory, video_code_to_timestamp) — presumably
# imported in the missing part of the file; confirm against original.
import cv2
from skimage.exposure import rescale_intensity
import pandas as pd
import numpy as np
from datasets.main_dataset import experiment

exemplar_strikes = pd.read_csv(os.path.join(output_directory, 'figure5', 'exemplar_strikes.csv'),
                               index_col='bout_index',
                               dtype={'ID': str, 'video_code': str})
# First six rows are attacks; rows 10-15 are s-strikes
attacks = exemplar_strikes.iloc[:6]
sstrikes = exemplar_strikes.iloc[10:16]

strike_video_directory = create_folder(output_directory, 'video4')

for strike_name, strike_cluster, pad in zip(('example_attacks', 'example_sstrikes'),
                                            (attacks, sstrikes),
                                            (50, 0)):
    save_images_to = create_folder(strike_video_directory, strike_name)
    panels = []
    for idx, bout_info in strike_cluster.iterrows():
        fish_info = experiment.data[experiment.data['ID'] == bout_info.ID].iloc[0]
        video_file = video_code_to_timestamp(bout_info.video_code)
        video_path = os.path.join(experiment.video_directory, fish_info.video_directory,
                                  video_file + '.avi')
        tracking_path = os.path.join(experiment.subdirs['tracking'], fish_info.ID,
                                     bout_info.video_code + '.csv')
        # NOTE(review): source chunk truncated here — frame extraction and
        # panel assembly continue beyond this chunk.