Example #1
# ROI properties to plot
properties = ['DSI', 'reliability', 'reliability', 'BF']

# Generate pandas dataframes

plot_x = 'SNR'
cluster_analysis_functions_v2.plot_df_dataset(rois_df, plot_x, properties, exp_ID=('%s_%s' % (current_movie_ID, extraction_type)),
                                              save_fig=True, save_dir=figure_save_dir)

plt.close('all')
# %% PART 4: Save data
os.chdir('/Users/burakgur/Documents/GitHub/python_lab/2p_calcium_imaging')
varDict = locals()
pckl_save_name = ('%s_%s' % (current_movie_ID, extraction_type))
saveWorkspace(outDir=cluster_analysis_params['save_output_dir'],
              baseName=pckl_save_name, varDict=varDict, varFile='varSave_cluster_v2.txt',
              extension='.pickle')
print('%s saved...' % pckl_save_name)

# %% Plot distance to midline
import seaborn as sns
from matplotlib.colors import LogNorm

dist, distmask = ROI_mod.calculate_distance_from_region(final_rois)
dist = np.array(dist) * x_size
distmask = np.array(distmask) * x_size
bf_image = ROI_mod.generate_colorMasks_properties(final_rois, 'BF')
bfs = list(map(lambda roi: roi.BF, final_rois))
dist_df = pd.DataFrame.from_dict({'BF': bfs, 'Distance': dist})
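
# A minimal, illustrative sketch of how the BF-versus-distance dataframe built
# above could be plotted with the seaborn import from this cell; the axis
# labels and units are assumptions (BF in Hz, distance scaled by x_size), not
# taken from the original script.
dist_ax = sns.scatterplot(data=dist_df, x='Distance', y='BF')
dist_ax.set(xlabel='Distance to midline (assumed um)',
            ylabel='BF (assumed Hz)')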
plt.style.use("dark_background")
final_rois = separated_rois

# Plot RF map and screen
plt.subplot(121)
PD_image, alpha_image = ROI_mod.generate_colorMasks_properties(
    final_rois, 'PD')
plt.imshow(PD_image, cmap='hsv', alpha=.5)
plt.colorbar()
plt.imshow(mean_image, cmap='gist_gray', alpha=.4)
plt.title('PD map')
plt.axis('off')
plt.subplot(122)
plt.imshow(rfs, cmap='hsv', alpha=.5)
plt.colorbar()
plt.imshow(screen, cmap='binary', alpha=.3)
plt.title('RF center on screen')
ax = plt.gca()
ax.axis('off')

# %% PART 4: Save data
os.chdir('/Users/burakgur/Documents/GitHub/python_lab/2p_calcium_imaging')
varDict = locals()
pckl_save_name = ('%s_%s' % (current_movie_ID, extraction_type))
saveWorkspace(outDir=cluster_analysis_params['save_output_dir'],
              baseName=pckl_save_name,
              varDict=varDict,
              varFile='varSave_cluster_v2.txt',
              extension='.pickle')

print('%s saved...' % pckl_save_name)
Example #3
    mean_data = np.mean(curr_data, axis=0)
    plt.errorbar(np.unique(epoch_freqs), mean_data, yerr=yerr,
                 label=('Cluster %d, N: %d ROIs'
                        % (cl_type, len(np.where(prediction == cl_type)[0]))))
    
#    plt.plot(np.transpose(\
#      np.mean(curr_data,axis=0)),
#    label=('Cluster %d, N: %d' % (cl_type,len(np.where(prediction==cl_type)[0]))))

plt.legend()
plt.xscale('log')
plt.ylabel('Normalized dF/F')
plt.xlabel('Temporal Frequency (Hz)')
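
# Note on the truncated loop above: `curr_data` is assumed to hold the temporal
# frequency tuning curves of the ROIs assigned to cluster `cl_type` (ROIs x
# frequencies), and `yerr` their spread across those ROIs (e.g. SD or SEM);
# neither variable is defined in the lines shown here.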

    

#%%
intRate = 10
# Interpolation of responses to a certain frequency
print('Interpolating to %d Hz' % intRate)
interpolationRate = intRate  # Interpolation rate in Hz
interpolatedAllRoi = interpolateTrialAvgROIs(trialAvgAllRoi=trialAvgAllRoi, 
                                             framePeriod=1/frameRate, 
                                             intRate=interpolationRate)
        

# locals() needs to be called within the script that
# generates the variables to be saved
varDict = locals()
savePath = saveWorkspace(outDir=saveOutputDir, baseName=imageID,
                         varDict=varDict, varFile='variablesToSave.txt',
                         extension='.pickle')
Example #4
pmc.plot_df_dataset(rois_df,
                    data_to_extract,
                    exp_ID=('%s_%s' %
                            (current_movie_ID, extraction_params['type'])),
                    save_fig=True,
                    save_dir=figure_save_dir)
# plt.close('all')

# %% PART 4: Save data
os.chdir('/Users/burakgur/Documents/GitHub/python_lab/2p_calcium_imaging')
varDict = locals()
pckl_save_name = ('%s_%s' % (current_movie_ID, extraction_params['type']))
saveWorkspace(saveOutputDir,
              pckl_save_name,
              varDict,
              varFile='data_save_vars.txt',
              extension='.pickle')

print('\n\n%s saved...\n\n' % pckl_save_name)

#%% Plot ROI summaries
if plot_roi_summ:
    if analysis_type == 'gratings_transfer_rois_save':
        import random
        plt.close('all')
        data_to_extract = [
            'DSI', 'BF', 'SNR', 'reliability', 'uniq_id', 'CSI', 'PD',
            'exp_ID', 'stim_name'
        ]
    #
    final_rois_all.append(workspace['final_rois'])
    flyIDs.append(list(map(lambda roi: roi.exp_ID, workspace['final_rois'])))
    tunings.append(
        np.squeeze(
            list(map(lambda roi: roi.TF_curve_resp, workspace['final_rois']))))
    a, b = ROI_mod.calculate_distance_from_region(final_rois)
    #    rois_dict = ROI_mod.data_to_list(workspace['final_rois'], properties)
    #    curr_df = pd.DataFrame.from_dict(rois_dict)
    #    combined_df = combined_df.append(curr_df, ignore_index=True,sort=False)

    os.chdir(functionPath)
    varDict = locals()
    saveWorkspace(outDir=saveOutputDir,
                  baseName=dataset,
                  varDict=varDict,
                  varFile='varSave_cluster_v2.txt',
                  extension='')

    print('%s saved...' % dataset)

#%% Tuning curves
tuning_curves = np.concatenate((tunings[:]))
all_mean_data = np.mean(tuning_curves, axis=0)
all_yerr = np.std(tuning_curves, axis=0)
epoch_freqs = final_rois[0][0].TF_curve_stim
fly_IDs = np.concatenate((flyIDs[:]))
unique_flies = np.unique(fly_IDs)
norm_tuning_cur_flies = {}
norm_tf_tuning = normalize(tuning_curves, axis=1, norm='max')  # presumably sklearn.preprocessing.normalize
for fly_num in unique_flies:
    pass  # per-fly normalization body is not shown in this snippet
saveOutputDir = os.path.join(initialDirectory, 'analyzed_data',
                             '191220_GluClflpTEV_NI_1')
#%% Combine the datasets and save as new
flyID = '191218bg_fly4'
fly_files = [file_n for file_n in os.listdir(saveOutputDir)
             if flyID in file_n.lower()]

for data_file in fly_files:
    # Check if it is one of the desired t-series

    data_path = os.path.join(saveOutputDir, data_file)
    with open(data_path, 'rb') as load_path:
        workspace = cPickle.load(load_path)
    rois = workspace['final_rois']

    for roi in rois:
        roi.experiment_info['Genotype'] = 'Pos_Mi1Rec__plus_GluClflpSTOPD'

    corrected_rois = rois
    os.chdir('/Users/burakgur/Documents/GitHub/python_lab/2p_calcium_imaging')
    varDict = {'final_rois': corrected_rois}

    saveWorkspace(outDir=saveOutputDir,
                  baseName=data_file.split('.')[0],
                  varDict=varDict,
                  varFile='varSave_cluster_v2.txt',
                  extension='.pickle')

    print(' %s saved...' % data_file.split('.')[0])
print('\n%s genotypes adjusted...\n' % flyID)
Example #7
def dataProcessSave(t_series_path,
                    stimInputDir,
                    saveOutputDir,
                    imageID,
                    current_exp_ID,
                    use_aligned=True,
                    intRate=10):
    """ Processes the data and saves the necessary variables

    Parameters
    ==========
    t_series_path : str

        Path of the T-series that includes the motion correction directory
        along with stimulus output and xml file.
        
    stimInputDir : str

        Path of the folder where stimulus input files are located. These files
        contain information about all the stimuli used in the experiments.
        
    saveOutputDir : str

        Path of the folder where the data output files will be saved
        
    imageID : str

        The unique ID of the image data to be saved
        
    current_exp_ID : str

        The experiment ID of the image data to be saved
        
    use_aligned: bool, optional
        Default: True
        
        Defines whether the aligned ('motCorr.sima') or the non-aligned
        ('TIFFs.sima') sequences will be used.
    
    intRate: int, optional
        Default: 10
        
        The rate (in Hz) at which the data will be interpolated.

    Returns
    =======
    None
        Nothing is returned; the extracted variables are saved to disk via
        saveWorkspace.
    """
    if use_aligned:
        print('Using the aligned sequences for extraction')
        dataDir = os.path.join(t_series_path, 'motCorr.sima')
    else:
        print('Using the non-aligned sequences for extraction')
        dataDir = os.path.join(t_series_path, 'TIFFs.sima')

    t_series_name = os.path.basename(t_series_path)
    # Finding the xml file and retrieving relevant information
    xmlPath = os.path.join(t_series_path, '*-???.xml')
    xmlFile = (glob.glob(xmlPath))[0]
    micRelTimes = getMicRelativeTime(xmlFile)

    #  Finding the frame period (1/FPS) and layer position
    framePeriod = getFramePeriod(xmlFile=xmlFile)
    layerPosition = getLayerPosition(xmlFile=xmlFile)

    # Finding and reading the stimulus output file, extracting relevant info
    stimOutPath = os.path.join(t_series_path, '_stimulus_output_*')
    stimOutFile = (glob.glob(stimOutPath))[0]
    (stimType, rawStimData) = readStimOut(stimOutFile=stimOutFile,
                                          skipHeader=1)
    (stimInputFile,
     stimInputData) = readStimInformation(stimType=stimType,
                                          stimInputDir=stimInputDir)
    stimName = os.path.basename(stimInputFile)
    isRandom = int(stimInputData['Stimulus.randomize'][0])
    epochDur = stimInputData['Stimulus.duration']
    epochDur = [float(sec) for sec in epochDur]

    # Finding epoch coordinates and number of trials
    epochCount = getEpochCount(rawStimData=rawStimData, epochColumn=3)
    (trialCoor, trialCount, isRandom) = divideEpochs(rawStimData=rawStimData,
                                                     epochCount=epochCount,
                                                     isRandom=isRandom,
                                                     framePeriod=framePeriod,
                                                     trialDiff=0.20,
                                                     overlappingFrames=0,
                                                     firstEpochIdx=0,
                                                     epochColumn=3,
                                                     imgFrameColumn=7,
                                                     incNextEpoch=True,
                                                     checkLastTrialLen=True)

    # Signal extraction and background subtraction
    print('Signal extraction...')
    (signalFile, chNames, usedChannel, roiKeys, usedRoiKey,
     usedExtLabel) = extractRawSignal(motCorrDir=dataDir)

    # ROI information, header includes the ROI numbers
    # tags include the types of ROIs e.g. Layer1
    (header, bgIndex, tags) = getROIinformation(signalFile=signalFile,
                                                bgLabel=['bg', 0])
    (bgSub, rawTrace) = subtractBg(signalFile=signalFile,
                                   bgIndex=bgIndex,
                                   skipHeader=3)

    # Calculating dF/F according to the baseline type
    if isRandom == 1:  # There is an epoch used for baseline
        baselineEpochPresent = True
        baselineDurationBeforeEpoch = 1.5  # In seconds
        baseDur = int(baselineDurationBeforeEpoch / framePeriod)  # In frames
    else:  # Presumably no epoch present for baseline, taking the mean of trial
        baselineEpochPresent = False
        baselineDurationBeforeEpoch = np.nan
        baseDur = np.nan

    (dffTraceAllRoi, baselineStdAllRoi,
     baselineMeanAllRoi) = dff(trialCoor=trialCoor,
                               header=header,
                               bgIndex=bgIndex,
                               bgSub=bgSub,
                               baselineEpochPresent=baselineEpochPresent,
                               baseDur=baseDur)
    # Trial averaging
    trialAvgAllRoi = trialAverage(dffTraceAllRoi=dffTraceAllRoi,
                                  bgIndex=bgIndex)
    # Correlation with stimulus
    (corrHeader, pvalHeader) = corrTrialAvg(trialAvgAllRoi=trialAvgAllRoi,
                                            epochDur=epochDur,
                                            bgIndex=bgIndex,
                                            framePeriod=framePeriod)

    # Interpolation of responses to a certain frequency
    print('Interpolating to %d Hz' % intRate)
    interpolationRate = intRate  # Interpolation rate in Hz
    interpolatedAllRoi = interpolateTrialAvgROIs(trialAvgAllRoi=trialAvgAllRoi,
                                                 framePeriod=framePeriod,
                                                 intRate=interpolationRate)

    # locals() needs to be called within the script that
    # generates the variables to be saved
    varDict = locals()
    savePath = saveWorkspace(outDir=saveOutputDir,
                             baseName=imageID,
                             varDict=varDict,
                             varFile='variablesToSave.txt',
                             extension='.pickle')
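
# Example usage (illustrative only): the paths and IDs below are hypothetical
# placeholders, not values taken from the original scripts.
# dataProcessSave(t_series_path='/path/to/fly1/TSeries-001',
#                 stimInputDir='/path/to/stimulus_input_files',
#                 saveOutputDir='/path/to/analyzed_data',
#                 imageID='fly1-TSeries-001',
#                 current_exp_ID='fly1',
#                 use_aligned=True,
#                 intRate=10)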