def main():
    # store the chosen odor threshold for each experiment
    for expt_id, value in THRESHOLDS_DICT.items():
        threshold = models.Threshold(
            experiment_id=expt_id, determination='chosen40_500', value=value)
        session.add(threshold)

    commit(session)
def main():
    # loop over all experiments
    for expt in session.query(models.Experiment):
        print("In experiment '{}'".format(expt.id))

        dt = 1 / expt.sampling_frequency

        for traj in session.query(models.Trajectory).filter_by(experiment=expt):
            positions = traj.positions(session)
            velocities = traj.velocities(session)

            # calculate kinematic quantities
            velocities_a = kinematics.norm(velocities)

            accelerations = kinematics.acceleration(velocities, dt)
            accelerations_a = kinematics.norm(accelerations)

            headings = kinematics.heading(velocities)

            angular_velocities = kinematics.angular_velocity(velocities, dt)
            angular_velocities_a = kinematics.norm(angular_velocities)

            angular_accelerations = kinematics.acceleration(angular_velocities, dt)
            angular_accelerations_a = kinematics.norm(angular_accelerations)

            distance_from_wall = kinematics.distance_from_wall(positions, WALL_BOUNDS)

            # store kinematic quantities in timepoints
            for ctr, tp in enumerate(traj.timepoints(session)):
                tp.velocity_a = velocities_a[ctr]
                tp.acceleration_x, tp.acceleration_y, tp.acceleration_z = accelerations[ctr]
                tp.acceleration_a = accelerations_a[ctr]

                tp.heading_xy, tp.heading_xz, tp.heading_xyz = headings[ctr]

                tp.angular_velocity_x, tp.angular_velocity_y, tp.angular_velocity_z = \
                    angular_velocities[ctr]
                tp.angular_velocity_a = angular_velocities_a[ctr]

                tp.angular_acceleration_x, tp.angular_acceleration_y, tp.angular_acceleration_z = \
                    angular_accelerations[ctr]
                tp.angular_acceleration_a = angular_accelerations_a[ctr]

                tp.distance_from_wall = distance_from_wall[ctr]

                session.add(tp)

    commit(session)
def main():
    for expt in session.query(models.Experiment):
        print('In experiment "{}"...'.format(expt.id))

        for odor_state in ODOR_STATES:
            print('Odor state = "{}"'.format(odor_state))

            trajs = session.query(models.Trajectory).\
                filter_by(experiment=expt, odor_state=odor_state, clean=True)

            for variable_name in QUANTITIES:
                print('{}...'.format(variable_name))

                # pool this variable's timepoints across all clean trajectories
                traj_data = []
                traj_ctr = 0

                for traj in trajs:
                    traj_data.extend(traj.timepoint_field(session, variable_name))
                    traj_ctr += 1

                # magnitude variables are bounded below by 0; headings also bounded above by 180
                lb, ub = None, None
                if variable_name.endswith('_a') or 'heading' in variable_name:
                    lb = 0
                if 'heading' in variable_name:
                    ub = 180

                cts, bins = make_distribution(np.array(traj_data), N_BINS, lb=lb, ub=ub)

                file_name = '{}_{}_{}.pickle'.format(expt.id, odor_state, variable_name)

                tp_dstr = models.TimepointDistribution(
                    figure_root_path_env_var=figure_data_env_var,
                    directory_path=DIRECTORY_PATH,
                    file_name=file_name,
                    variable=variable_name,
                    experiment_id=expt.id,
                    odor_state=odor_state,
                    n_data_points=len(traj_data),
                    n_trajectories=traj_ctr,
                    bin_min=bins[0],
                    bin_max=bins[-1],
                    n_bins=N_BINS)
                tp_dstr.data = {'cts': cts, 'bins': bins}

                session.add(tp_dstr)

    commit(session)
def main():
    for expt in session.query(models.Experiment):
        threshold = session.query(models.Threshold).filter_by(
            experiment=expt, determination=DETERMINATION).first()

        for cg in threshold.crossing_groups:
            print(cg.id)

            for crossing in cg.crossings:
                # pull position and heading at the entry, peak, and exit timepoints
                features = {}
                for field in ('position_x', 'position_y', 'position_z',
                              'heading_xy', 'heading_xz', 'heading_xyz'):
                    for ref in ('entry', 'peak', 'exit'):
                        features['{}_{}'.format(field, ref)] = crossing.timepoint_field(
                            session, field, 0, 0, ref, ref)[0]

                crossing.feature_set_basic = models.CrossingFeatureSetBasic(**features)

                session.add(crossing)

    commit(session)
def main():
    for expt in session.query(models.Experiment):
        print('In experiment "{}"...'.format(expt.id))

        for odor_state in ODOR_STATES:
            print('Odor state = "{}"'.format(odor_state))

            trajs = session.query(models.Trajectory).\
                filter_by(experiment=expt, odor_state=odor_state, clean=True)

            for variable in QUANTITIES:
                print('{}...'.format(variable))

                # collect this variable's time series from every clean trajectory
                tp_data = [traj.timepoint_field(session, variable) for traj in trajs]
                n_data_points = np.sum([len(d) for d in tp_data])
                window_len = N_LAGS / expt.sampling_frequency

                # autocorrelation (with confidence bounds) pooled over trajectories
                acor, p_value, conf_lb, conf_ub = \
                    time_series.xcov_multi_with_confidence(
                        tp_data, tp_data, 0, N_LAGS, normed=True)
                time_vector = np.arange(len(acor)) / expt.sampling_frequency

                file_name = '{}_{}_{}.pickle'.format(expt.id, odor_state, variable)

                tp_acor = models.TimepointAutocorrelation(
                    figure_root_path_env_var=figure_data_env_var,
                    directory_path=DIRECTORY_PATH,
                    file_name=file_name,
                    variable=variable,
                    experiment_id=expt.id,
                    odor_state=odor_state,
                    n_data_points=n_data_points,
                    n_trajectories=len(tp_data),
                    window_len=window_len)
                tp_acor.data = {
                    'time_vector': time_vector,
                    'autocorrelation': acor,
                    'p_value': p_value,
                    'confidence_lower': conf_lb,
                    'confidence_upper': conf_ub,
                }

                session.add(tp_acor)

    commit(session)
def main():
    for th_ctr in range(2):
        for expt in session.query(models.Experiment):
            print('Experiment "{}"'.format(expt.id))

            threshold_value = THRESHOLD_VALUES[expt.insect][th_ctr]

            # make threshold
            threshold = models.Threshold(
                experiment=expt, determination='arbitrary', value=threshold_value)
            session.add(threshold)

            # loop over odor states
            for odor_state in ODOR_STATES:
                print('Odor "{}"'.format(odor_state))

                # make crossing group
                cg_id = '{}_{}_th{}'.format(expt.id, odor_state, threshold_value)
                cg = models.CrossingGroup(
                    id=cg_id, experiment=expt, odor_state=odor_state, threshold=threshold)
                session.add(cg)

                # get crossings for each trajectory
                for traj in session.query(models.Trajectory).\
                        filter_by(experiment=expt, odor_state=odor_state, clean=True):

                    segments, peaks = time_series.segment_by_threshold(
                        traj.odors(session), threshold_value, traj.timepoint_ids_extended)

                    # add crossings
                    for s_ctr, (segment, peak) in enumerate(zip(segments, peaks)):
                        crossing = models.Crossing(
                            trajectory=traj, crossing_number=s_ctr + 1, crossing_group=cg)

                        crossing.start_timepoint_id = segment[0]
                        crossing.entry_timepoint_id = segment[1]
                        crossing.peak_timepoint_id = segment[2]
                        crossing.exit_timepoint_id = segment[3] - 1
                        crossing.end_timepoint_id = segment[4] - 1

                        crossing.max_odor = peak
                        session.add(crossing)

    commit(session)
def main():
    for expt in session.query(models.Experiment):
        # mosquito experiments have a nonzero baseline odor reading
        if 'mosquito' in expt.id:
            baseline = MOSQUITO_BASELINE_ODOR
        else:
            baseline = 0

        trajs = session.query(models.Trajectory).filter_by(experiment=expt, clean=True)

        for traj in trajs:
            odor = traj.odors(session)
            # sum of baseline-subtracted odor over timepoints, divided by 100
            # (presumably converting the per-timepoint sum to a time integral)
            integrated_odor = (odor - baseline).sum() / 100

            traj.odor_stats = models.TrajectoryOdorStats(integrated_odor=integrated_odor)
            session.add(traj)

    commit(session)
def main():
    for expt in session.query(models.Experiment):
        print('Experiment "{}"'.format(expt.id))

        threshold = session.query(models.Threshold).\
            filter_by(experiment=expt, determination=DETERMINATION).first()

        # loop over odor states
        for odor_state in ODOR_STATES:
            print('Odor "{}"'.format(odor_state))

            # make crossing group
            cg_id = '{}_{}_th{}_{}'.format(expt.id, odor_state, threshold.value, DETERMINATION)
            cg = models.CrossingGroup(
                id=cg_id, experiment=expt, odor_state=odor_state, threshold=threshold)
            session.add(cg)

            # get crossings for each trajectory
            for traj in session.query(models.Trajectory).\
                    filter_by(experiment=expt, odor_state=odor_state, clean=True):

                segments, peaks = time_series.segment_by_threshold(
                    traj.odors(session), threshold.value, traj.timepoint_ids_extended)

                # add crossings
                for s_ctr, (segment, peak) in enumerate(zip(segments, peaks)):
                    crossing = models.Crossing(
                        trajectory=traj, crossing_number=s_ctr + 1, crossing_group=cg)

                    crossing.start_timepoint_id = segment[0]
                    crossing.entry_timepoint_id = segment[1]
                    crossing.peak_timepoint_id = segment[2]
                    crossing.exit_timepoint_id = segment[3] - 1
                    crossing.end_timepoint_id = segment[4] - 1

                    crossing.max_odor = peak
                    session.add(crossing)

    commit(session)
def main():
    for insect in INSECTS:
        # load the cleaning parameters for this insect into a dict
        cleaning_params_list = session.query(
            models.TrajectoryCleaningParameter.param,
            models.TrajectoryCleaningParameter.value).\
            filter_by(insect=insect).all()
        cleaning_params = dict(cleaning_params_list)

        for expt in session.query(models.Experiment).filter_by(insect=insect):
            for traj in expt.trajectories:
                clean_portions = clean_traj(traj, cleaning_params)

                for ctr, clean_portion in enumerate(clean_portions):
                    if clean_portion[0] == traj.start_timepoint_id \
                            and clean_portion[1] == traj.end_timepoint_id:
                        # the clean portion spans the whole trajectory,
                        # so just mark the original trajectory as clean
                        traj.clean = True
                        portion_traj = traj
                    else:
                        stp_id, etp_id = clean_portion

                        # make new trajectory
                        id = traj.id + '_c{}'.format(ctr)
                        portion_traj = models.Trajectory(
                            id=id,
                            start_timepoint_id=stp_id,
                            end_timepoint_id=etp_id,
                            experiment=expt,
                            raw=False,
                            clean=True,
                            odor_state=traj.odor_state)
                        session.add(portion_traj)

                    portion_traj.basic_info = make_trajectory_basic_info(portion_traj)
                    session.add(portion_traj)

    commit(session)
def main(n_trials, n_train_max, n_test_max, root_dir_env_var):
    # make basis functions
    basis_ins, basis_outs, max_filter_length = igfh.make_exponential_basis_functions(
        INPUT_TAUS, OUTPUT_TAUS, DOMAIN_FACTOR)

    for expt_id in EXPERIMENT_IDS:
        for odor_state in ODOR_STATES:
            trajs = igfh.get_trajs_with_integrated_odor_above_threshold(
                expt_id, odor_state, INTEGRATED_ODOR_THRESHOLD)

            train_test_ratio = (n_train_max / (n_train_max + n_test_max))
            test_train_ratio = (n_test_max / (n_train_max + n_test_max))
            # cast to int so the counts can be used as slice indices below
            n_train = int(min(n_train_max, np.floor(len(trajs) * train_test_ratio)))
            n_test = int(min(n_test_max, np.floor(len(trajs) * test_train_ratio)))

            trajs_trains = []
            trajs_tests = []
            glmss = []
            residualss = []

            for trial_ctr in range(n_trials):
                print('{}: odor {} (trial number: {})'.format(expt_id, odor_state, trial_ctr))

                # get random set of training and test trajectories
                perm = np.random.permutation(len(trajs))
                train_idxs = perm[:n_train]
                test_idxs = perm[-n_test:]
                trajs_train = list(np.array(trajs)[train_idxs])
                trajs_test = list(np.array(trajs)[test_idxs])

                # fit one GLM per (input set, output) pair
                glms = []
                residuals = []

                for input_set, output, basis_in, basis_out in zip(
                        INPUT_SETS, OUTPUTS, basis_ins, basis_outs):

                    # get relevant time-series data from each trajectory set
                    data_train = igfh.time_series_from_trajs(
                        trajs_train, inputs=input_set, output=output)
                    data_test = igfh.time_series_from_trajs(
                        trajs_test, inputs=input_set, output=output)

                    glm = fitting.GLMFitter(link=LINK, family=FAMILY)
                    glm.set_params(DELAY, basis_in=basis_in, basis_out=False)
                    glm.input_set = input_set
                    glm.output = output

                    # fit to training data
                    glm.fit(data=data_train, start=START_TIMEPOINT)

                    # predict test data
                    prediction = glm.predict(data=data_test, start=START_TIMEPOINT)
                    _, ground_truth = glm.make_feature_matrix_and_response_vector(
                        data_test, START_TIMEPOINT)

                    # calculate residual (root-mean-square prediction error)
                    residual = np.sqrt(((prediction - ground_truth) ** 2).mean())

                    # clear out feature matrix and response from glm for efficient storage
                    glm.feature_matrix = None
                    glm.response_vector = None
                    glm.results.remove_data()

                    # store things
                    glms.append(glm)
                    residuals.append(residual)

                trajs_train_ids = [traj.id for traj in trajs_train]
                trajs_test_ids = [traj.id for traj in trajs_test]

                trajs_trains.append(trajs_train_ids)
                trajs_tests.append(trajs_test_ids)
                glmss.append(glms)
                residualss.append(residuals)

            # save a glm fit set
            glm_fit_set = models.GlmFitSet()

            # add data to it
            glm_fit_set.root_dir_env_var = root_dir_env_var
            glm_fit_set.path_relative = 'glm_fit'
            glm_fit_set.file_name = '{}_{}_odor_{}.pickle'.format(FIT_NAME, expt_id, odor_state)
            glm_fit_set.experiment = session.query(models.Experiment).get(expt_id)
            glm_fit_set.odor_state = odor_state
            glm_fit_set.name = FIT_NAME
            glm_fit_set.link = LINK
            glm_fit_set.family = FAMILY
            glm_fit_set.integrated_odor_threshold = INTEGRATED_ODOR_THRESHOLD
            glm_fit_set.predicted = PREDICTED
            glm_fit_set.delay = DELAY
            glm_fit_set.start_time_point = START_TIMEPOINT
            glm_fit_set.n_glms = len(glms)
            glm_fit_set.n_train = n_train
            glm_fit_set.n_test = n_test
            glm_fit_set.n_trials = n_trials

            # save data file
            glm_fit_set.save_to_file(
                input_sets=INPUT_SETS,
                outputs=OUTPUTS,
                basis_in=basis_ins,
                basis_out=basis_outs,
                trajs_train=trajs_trains,
                trajs_test=trajs_tests,
                glms=glmss,
                residuals=residualss)

            # save everything else (+ link to data file) in database
            session.add(glm_fit_set)
            commit(session)
from db_api.connect import session, commit
from db_api import models

FRUIT_FLY_PARAMS = {
    'speed_threshold': 0.03,  # m/s
    'dist_from_wall_threshold': 0.01,  # m
    'min_pause_length': 10,  # hundredths of a second
    'min_trajectory_length': 50,  # hundredths of a second
}

MOSQUITO_PARAMS = {
    'speed_threshold': 0.03,  # m/s
    'dist_from_wall_threshold': 0.01,  # m
    'min_pause_length': 10,  # hundredths of a second
    'min_trajectory_length': 50,  # hundredths of a second
}

for param, value in FRUIT_FLY_PARAMS.items():
    tcp = models.TrajectoryCleaningParameter(insect='fruit_fly', param=param, value=value)
    session.add(tcp)

for param, value in MOSQUITO_PARAMS.items():
    tcp = models.TrajectoryCleaningParameter(insect='mosquito', param=param, value=value)
    session.add(tcp)

commit(session)
def main():
    n_timesteps = TIME_AVG_END - TIME_AVG_START

    for expt in session.query(models.Experiment):
        for cg in session.query(models.CrossingGroup).\
                filter(models.CrossingGroup.experiment == expt).\
                filter(models.CrossingGroup.odor_state == 'on').\
                filter(models.Threshold.determination == 'arbitrary'):

            print('Crossings group: "{}"'.format(cg.id))

            for th_val in DISCRIMINATION_THRESHOLD_VALUES[expt.insect]:

                # split crossings by whether their peak odor is below or above this threshold
                crossings_below = session.query(models.Crossing).\
                    filter(models.Crossing.crossing_group == cg).\
                    filter(models.Crossing.max_odor < th_val).all()
                crossings_above = session.query(models.Crossing).\
                    filter(models.Crossing.crossing_group == cg).\
                    filter(models.Crossing.max_odor >= th_val).all()

                responses_below = np.nan * np.ones(
                    (len(crossings_below), n_timesteps), dtype=float)
                responses_above = np.nan * np.ones(
                    (len(crossings_above), n_timesteps), dtype=float)

                # fill in values
                for crossing, response in zip(crossings_below, responses_below):
                    response_var = crossing.timepoint_field(
                        session, RESPONSE_VAR,
                        first=TIME_AVG_START, last=TIME_AVG_END - 1,
                        first_rel_to=TIME_AVG_REL_TO, last_rel_to=TIME_AVG_REL_TO)
                    response[:len(response_var)] = response_var

                for crossing, response in zip(crossings_above, responses_above):
                    response_var = crossing.timepoint_field(
                        session, RESPONSE_VAR,
                        first=TIME_AVG_START, last=TIME_AVG_END - 1,
                        first_rel_to=TIME_AVG_REL_TO, last_rel_to=TIME_AVG_REL_TO)
                    response[:len(response_var)] = response_var

                diff, lb, ub = get_time_avg_response_diff_and_bounds(
                    responses_below, responses_above)

                # the difference is undefined if either group is empty
                if len(crossings_below) == 0 or len(crossings_above) == 0:
                    diff = None
                    lb = None
                    ub = None

                disc_th = models.DiscriminationThreshold(
                    crossing_group=cg,
                    odor_threshold=th_val,
                    n_crossings_below=len(crossings_below),
                    n_crossings_above=len(crossings_above),
                    time_avg_start=TIME_AVG_START,
                    time_avg_end=TIME_AVG_END,
                    time_avg_rel_to=TIME_AVG_REL_TO,
                    variable=RESPONSE_VAR,
                    time_avg_difference=diff,
                    lower_bound=lb,
                    upper_bound=ub)

                session.add(disc_th)

    commit(session)