check_tail_lengths=False, tail_columns_only=True) exemplars = exemplars.map(eigenfish, whiten=True, mean=mean_tail, std=std_tail) exemplars = exemplars.list_bouts(values=True, ndims=n_dims) # Set paths output_directory = create_folder(experiment.subdirs['analysis'], 'distance_matrices') # Import experiment bouts experiment_bouts = import_bouts(experiment.directory) experiment_bouts = experiment_bouts.map(eigenfish, whiten=True, mean=mean_tail, std=std_tail) # Compute distance matrices print_heading('CALCULATING DISTANCE MATRICES') distances = {} analysis_times = [] timer = Timer() timer.start() for ID in experiment_bouts.metadata['ID'].unique(): output_path, path_exists = create_filepath(output_directory, ID, '.npy', True) if path_exists: distances[ID] = np.load(output_path) if not path_exists: print ID + '...', queries = experiment_bouts.list_bouts(IDs=[ID], values=True, ndims=n_dims) fish_distances = calculate_distance_matrix_templates(queries, exemplars, fs=frame_rate) distances[ID] = fish_distances time_taken = timer.lap() analysis_times.append(time_taken) print timer.convert_time(time_taken) np.save(output_path, fish_distances) if len(analysis_times) > 0:
from paths import stimulus_map_directory
from behaviour_analysis.analysis.stimulus_mapping import find_paramecia
from behaviour_analysis.miscellaneous import Timer
import numpy as np
import os


if __name__ == "__main__":

    timer = Timer()
    timer.start()

    # ------------------------------------------------------------------
    # Average frame
    # ------------------------------------------------------------------
    # The stack of random frames is averaged in chunks of 1000 so that the
    # float64 conversion never has to hold the whole array at once.
    frames = np.load(os.path.join(stimulus_map_directory, 'random_frames.npy'))

    chunk_means = []
    chunk_sizes = []
    for start in np.arange(0, len(frames), 1000):
        chunk = frames[start:start + 1000].astype('float64')
        chunk_means.append(chunk.mean(axis=0))
        chunk_sizes.append(float(len(chunk)))
    chunk_means = np.array(chunk_means)
    chunk_sizes = np.array(chunk_sizes)

    # Recombine the per-chunk means into the overall mean, weighting each
    # chunk by its size (the final chunk may hold fewer than 1000 frames).
    average = np.einsum('i,ijk->jk', chunk_sizes, chunk_means)
    average /= chunk_sizes.sum()
    np.save(os.path.join(stimulus_map_directory, 'random_average.npy'), average)

    # ------------------------------------------------------------------
    # Histogram
    # ------------------------------------------------------------------
    histogram = np.zeros((250, 250))
from behaviour_analysis.analysis.stimulus_mapping import BoutStimulusMapper import os import pandas as pd if __name__ == "__main__": for experiment in (blu, lak): print_heading(os.path.basename(experiment.directory)) stimulus_map_directory = create_folder(experiment.subdirs['analysis'], 'stimulus_maps') mapped_bouts = pd.read_csv(os.path.join(experiment.subdirs['analysis'], 'mapped_bouts.csv'), index_col=0, dtype={ 'ID': str, 'video_code': str }) # Calculate stimulus maps for each fish in parallel timer = Timer() timer.start() mapper = BoutStimulusMapper(mapped_bouts, experiment, stimulus_map_directory, n_threads=20) mapper.run() total_time = timer.stop() print 'Total time:', timer.convert_time(total_time)
from datasets.main_dataset import experiment from paths import paths, capture_strike_directory from behaviour_analysis.analysis.stimulus_mapping import calculate_fish_sequences from behaviour_analysis.manage_files import create_folder from behaviour_analysis.miscellaneous import Timer from joblib import Parallel, delayed import numpy as np import pandas as pd import os strike_sequence_directory = create_folder(capture_strike_directory, 'strike_sequences') if __name__ == "__main__": capture_strikes = pd.read_csv(paths['capture_strikes'], index_col=0, dtype={'ID': str, 'video_code': str}) complete_strikes = capture_strikes[(capture_strikes['start'] - 500) >= 0] complete_strikes.to_csv(os.path.join(strike_sequence_directory, 'complete_strikes.csv'), index=True) analysis_times = Parallel(4)(delayed(calculate_fish_sequences)(ID, complete_strikes, experiment, strike_sequence_directory) for ID in complete_strikes['ID'].unique()) print 'Total time:', Timer.convert_time(np.sum(analysis_times)) print 'Average time:', Timer.convert_time(np.mean(analysis_times))
# Compare first antisymmetric transition mode a1_dot_a = np.abs(np.dot(USVa1[0, :, 0], USVa2[0, :, 0])) a1_dot_b = np.abs(np.dot(USVa1[0, :, 0], USVa2[0, :, 1])) a1_dot = max(a1_dot_a, a1_dot_b) # Append to output dot_products.append((s1_dot, s2_dot, a1_dot)) i += 1 return np.array(dot_products) if __name__ == "__main__": transition_directory = os.path.join(experiment.subdirs['analysis'], 'transitions') control_matrices = np.load( os.path.join(transition_directory, 'control', 'transition_matrices.npy')) ablated_matrices = np.load( os.path.join(transition_directory, 'ablated', 'transition_matrices.npy')) timer = Timer() timer.start() dot_products = compare_transition_modes(ablated_matrices, control_matrices, exact=False, n_permutations=100) np.save(os.path.join(transition_directory, 'compare_control_ablated.npy'), dot_products) print timer.convert_time(timer.stop())
# Import bouts capture_strike_data = BoutData.from_directory(capture_strikes, experiment.subdirs['kinematics'], check_tail_lengths=False, tail_columns_only=True) # Transform transformed_strikes = capture_strike_data.map(eigenfish, whiten=True, mean=mean_tail, std=std_tail) transformed_strikes = transformed_strikes.list_bouts(values=True, ndims=3) # Truncate truncated_strikes = [bout[12:37] for bout in transformed_strikes] bw = 0.006 # 3 frames # Calculate distance matrix print_heading('CALCULATING CAPTURE STRIKE DISTANCE MATRIX') timer = Timer() timer.start() D_normal = calculate_distance_matrix(truncated_strikes, bw=bw) D_flipped = calculate_distance_matrix(truncated_strikes, bw=bw, flip=True) D = np.min([D_normal, D_flipped], axis=0) np.save(paths['distance_matrix'], D) time_taken = timer.stop() print 'Time taken: {}'.format(timer.convert_time(time_taken)) # Perform embedding D = squareform(D) np.random.seed(1992) isomap = IsomapPrecomputed(n_neighbors=5, n_components=2) isomapped_strikes = isomap.fit_transform(D) np.save(paths['isomapped_strikes'], isomapped_strikes)