Example #1
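The snippets below all rely on a shared set of imports that this listing elides. The block below is an assumption, reconstructed from how the aliases (da, syn, pd, sa, aa) are used in the examples; the at_synapse_detection module paths are a guess at the originating package and may differ in your setup.

# Assumed common imports for these examples (not shown in the snippets).
import os
import socket
import multiprocessing as mp
import numpy as np
from skimage import measure
from skimage import morphology as morph

# Assumed project modules, inferred from usage in the examples below:
from at_synapse_detection import dataAccess as da          # tiff/JSON loading helpers
from at_synapse_detection import SynapseDetection as syn   # detection routines
from at_synapse_detection import processDetections as pd   # post-processing (shadows the usual pandas alias)
from at_synapse_detection import SynapseAnalysis as sa     # analysis helpers
from at_synapse_detection import antibodyAnalysis as aa    # antibody measures
# The synaptogram module used in Example #11 is assumed to come from the same package.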
def run_synapse_detection_astro(atet_input):
    """
    Run synapse detection and result evaluation. The parameters need to be rethought.

    Parameters
    -------------------
    atet_input : dict
        query, file locations, and mask parameters for one detection run

    Returns
    -------------------
    output_dict : dict
        contains 'queryID', 'query', and the computed 'queryresult'

    """

    query = atet_input['query']
    queryID = atet_input['queryID']
    nQuery = atet_input['nQuery']
    resolution = atet_input['resolution']
    data_location = atet_input['data_location']
    data_region_location = atet_input['data_region_location']
    output_foldername = atet_input['output_foldername']
    region_name = atet_input['region_name']
    layer_mask_str = atet_input['mask_str']
    dapi_mask_str = atet_input['dapi_mask_str']
    mouse_number = atet_input['mouse_number']

    # Load the data
    synaptic_volumes = da.load_tiff_from_astro_query(query,
                                                     data_region_location)

    # Load DAPI mask
    dapi_mask_fn = os.path.join(dapi_mask_str,
                                str(mouse_number) + 'ss-DAPI-mask.tiff')
    dapi_mask = da.imreadtiff(dapi_mask_fn)
    dapi_mask = dapi_mask.astype(bool)  # np.bool was removed in NumPy >= 1.24
    combined_mask = np.logical_not(dapi_mask)  # keep portions without DAPI
    # Mask data
    synaptic_volumes = mask_synaptic_volumes(synaptic_volumes, combined_mask)

    volume_um3 = get_masked_volume(synaptic_volumes, combined_mask, resolution)
    print(volume_um3)

    # Run Synapse Detection
    print('running synapse detection')
    resultvol = syn.getSynapseDetections_astro(synaptic_volumes, query)

    # Save the probability map to file, if you want
    outputNPYlocation = os.path.join(data_location, output_foldername,
                                     region_name)
    syn.saveresultvol(resultvol, outputNPYlocation, 'query_', queryID)

    thresh = 0.9
    queryresult = compute_measurements(resultvol, query, volume_um3, thresh)

    output_dict = {
        'queryID': queryID,
        'query': query,
        'queryresult': queryresult
    }
    return output_dict
Example #2
def getSynapseDetectionsMW(synapticVolumes, query, kernelLength=2, edge_win=3,
                           search_win=2):
    """
    This function calls the functions needed to run probabilistic synapse detection 

    Parameters
    ----------
    synapticVolumes : dict
        has two keys (presynaptic,postsynaptic) which contain lists of 3D numpy arrays 
    query : dict
        contains the minumum slice information for each channel 
    kernelLength : int
        Minimum 2D Blob Size (default 2)
    edge_win: int
        Edge window (default 8)
    search_win: int
        Search windows (default 2)

    Returns
    ----------
    resultVol : 3D numpy array - final probability map 
    """

    # Data
    presynapticVolumes = synapticVolumes['presynaptic']
    postsynapticVolumes = synapticVolumes['postsynaptic']

    # Number of slices each blob should span 
    preIF_z = query['preIF_z']
    postIF_z = query['postIF_z']

    for n in range(0, len(presynapticVolumes)):

        presynapticVolumes[n] = syn.getProbMap_MW(presynapticVolumes[n], query['preIF'][n]) # Step 1
        presynapticVolumes[n] = syn.convolveVolume(presynapticVolumes[n], kernelLength) # Step 2

        if preIF_z[n] > 1: 
            factorVol = syn.computeFactor(presynapticVolumes[n], int(preIF_z[n])) # Step 3
            presynapticVolumes[n] = presynapticVolumes[n] * factorVol

    for n in range(0, len(postsynapticVolumes)):
        
        postsynapticVolumes[n] = syn.getProbMap_MW(postsynapticVolumes[n], query['postIF'][n]) # Step 1
        postsynapticVolumes[n] = syn.convolveVolume(postsynapticVolumes[n], kernelLength) # Step 2

        if postIF_z[n] > 1:
            factorVol = syn.computeFactor(postsynapticVolumes[n], int(postIF_z[n])) # Step 3
            postsynapticVolumes[n] = postsynapticVolumes[n] * factorVol

    # combinePrePostVolumes(base, adjacent)
    # Step 4 

    #print(len(presynapticVolumes))
    #print(len(postsynapticVolumes))
    if len(postsynapticVolumes) == 0: 
        resultVol = syn.combinePrePostVolumes(presynapticVolumes, postsynapticVolumes, edge_win, search_win)
    else: 
        resultVol = syn.combinePrePostVolumes(postsynapticVolumes, presynapticVolumes, edge_win, search_win)

    return resultVol
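A minimal usage sketch, assuming random stand-in volumes; the query structure mirrors the one built in Example #4, and the channel names are placeholders.

# Hypothetical call with random stand-in data (illustration only).
rng = np.random.default_rng(0)
synapticVolumes = {
    'presynaptic': [rng.random((512, 512, 10))],   # one presynaptic channel
    'postsynaptic': [rng.random((512, 512, 10))],  # one postsynaptic channel
}
query = {'preIF': ['synapsin_1st.tif'], 'preIF_z': [2],
         'postIF': ['PSD95m_1st.tif'], 'postIF_z': [2]}
resultVol = getSynapseDetectionsMW(synapticVolumes, query)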
Example #3
def getImageProbMapCutoutFromFile(channelname, sliceInd, startX, startY,
                                  deltaX, deltaY, filepath):
    """
    Load cutout of a slice of a tiff image

    Parameters
    -----------
    channelname : str
    sliceInd : int
    startX : int
    startY : int
    deltaX : int
    deltaY : int
    filepath : str

    Returns
    -----------
    cutout: 2D numpy array
    """

    folderpath = os.path.join(filepath, channelname)
    img = da.imreadtiffSingleSlice(folderpath, sliceInd)
    img = img.astype(np.float64)  # astype returns a copy; assign the result

    probimg = syn.getProbMap(img)
    cutout = probimg[startY:(startY + deltaY), startX:(startX + deltaX)]

    return cutout
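A hypothetical call, assuming a tiff series for a 'PSD95' channel lives under filepath; the channel name, coordinates, and path are all placeholders.

# Hypothetical usage: a 200x200 cutout of slice 5 of the PSD95 channel.
cutout = getImageProbMapCutoutFromFile('PSD95', 5, startX=100, startY=100,
                                       deltaX=200, deltaY=200,
                                       filepath='/path/to/data')
print(cutout.shape)  # (200, 200)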
Example #4
def main():
    # The 2D Blob size xy, units: pixels
    punctumSize = 2
    # Threshold associated with query (optional)
    thresh = 0.7

    listOfQueries = []

    ## QUERY 1
    preIF = ['synapsin_1st.tif']  # corresponds to the file name
    preIF_z = [2]
    postIF = ['PSD95m_1st.tif']
    postIF_z = [2]

    query = {
        'preIF': preIF,
        'preIF_z': preIF_z,
        'postIF': postIF,
        'postIF_z': postIF_z,
        'punctumSize': punctumSize,
        'thresh': thresh
    }
    listOfQueries.append(query)
    data = {'listOfQueries': listOfQueries}
    fn = 'example_queries.json'
    da.writeJSONFile(fn, data)

    # Load Query File
    querylist = syn.loadQueriesJSON(fn)
    print(len(querylist))
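Assuming da.writeJSONFile is a thin wrapper around json.dump, the resulting example_queries.json would contain something like the following (formatting may differ):

{
    "listOfQueries": [
        {
            "preIF": ["synapsin_1st.tif"],
            "preIF_z": [2],
            "postIF": ["PSD95m_1st.tif"],
            "postIF_z": [2],
            "punctumSize": 2,
            "thresh": 0.7
        }
    ]
}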
Example #5
def main():

    # Example use case of processing detections
    # Load probability map
    metadataFN = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)

    queryFN = metadata['querylocation']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)

    for n in range(0, len(listOfQueries)):
        fn = os.path.join(metadata['datalocation'], 'resultVol')
        fn = fn + str(n) + '.npy'

        resultVol = np.load(fn)
        print(fn)

        pd.probMapToJSON(resultVol, metadata, listOfQueries[n], n)
Example #6
def main():
    """
    Evaluation of site3 synapse detection results
    """

    # Load metadata
    metadataFN = 'site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)
    outputJSONlocation = metadata['outputJSONlocation']

    queryFN = metadata['querylocation']
    evalparam = metadata['evalparam']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)
    listOfThresholds = []
    listOfThresholds_to_text = []
    listOfQueries_to_text = []
    listofevals = []
    thresh_list = [0.7, 0.8, 0.9]
    # Evaluate each query individually
    for n, query in enumerate(listOfQueries):
        listOfThresholds.append(query['thresh'])

        for thresh in thresh_list:
            listOfThresholds_to_text.append(thresh)
            query['thresh'] = thresh
            print(query)
            listOfQueries_to_text.append(query.copy())  # copy: the same dict is mutated on each pass
            queryresult = pd.combineResultVolumes([n], [thresh], metadata,
                                                  evalparam)
            listofevals.append(queryresult)

    pd.printEvalToText(listofevals, listOfQueries_to_text,
                       listOfThresholds_to_text)

    # Combine Queries
    evaluation_parameters = metadata['evalparam']

    pd.combineResultVolumes(list(range(0, len(listOfQueries))),
                            listOfThresholds, metadata, evaluation_parameters)
Example #7
def main():

    # Example use case of the synapse detection pipeline
    # Location of Queries
    queryFN = 'example_queries.json'

    # List of Queries
    list_of_queries = syn.loadQueriesJSON(queryFN)

    # Test the first query
    query = list_of_queries[0]

    # Load the data
    synaptic_volumes = da.load_tiff_from_query(query)

    # Run Synapse Detection
    # Takes ~5 minutes to run
    result_vol = syn.getSynapseDetections(synaptic_volumes, query)

    # Verify Output
    label_vol = measure.label(result_vol > 0.9)
    stats = measure.regionprops(label_vol)
    print(len(stats))  # output should be 5440
Example #8
def measure_yfp(vol, thresh):
    """ measure the number of yfp voxels in a volume

    Parameters
    ------------
    vol - 3d numpy array 
    thresh - float threshold
    Return
    -----------
    result - int 
    """
    probvol = syn.getProbMap(vol)
    probvol = probvol > thresh
    result = np.sum(probvol)
    return result
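The returned count is in voxels; to express it as a physical volume, scale by the voxel size. The resolution values below match the resolution dict used in Examples #10 and #15; the input volume here is a random stand-in.

# Hypothetical conversion of the YFP voxel count to cubic micrometers.
vol = np.random.random((512, 512, 10))           # stand-in volume
n_vox = measure_yfp(vol, thresh=0.9)
resolution = {'res_xy_nm': 100, 'res_z_nm': 70}  # as in Examples #10 and #15
voxel_um3 = (resolution['res_xy_nm'] / 1000) ** 2 * (resolution['res_z_nm'] / 1000)
yfp_volume_um3 = n_vox * voxel_um3               # 0.0007 um^3 per voxel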
Example #9
def segment_dendrites(vol, thresh, num_of_dendrites, output_dir):
    """ segment dendrites

    Parameters
    ------------
    vol - 3d numpy array 
    thresh - float threshold
    Return
    -----------
    result - int 
    """
    probvol = syn.getProbMap(vol)
    bw_vol = probvol > thresh
    SE = morph.ball(5)
    bw_vol = morph.closing(bw_vol, SE)
    label_vol = measure.label(bw_vol, connectivity=2)
    stats = measure.regionprops(label_vol)
    # sort by size
    size_list = []
    for stat in stats:
        size_list.append(stat.area)

    ind_list = np.flip(np.argsort(size_list), 0)

    for n in range(0, num_of_dendrites):
        dendrite = stats[ind_list[n]]
        list_of_coords = dendrite.coords
        list_of_coords = np.array(list_of_coords)
        filename = 'dendrite' + str(n) + '.off'
        output_filename = os.path.join(output_dir, filename)
        write_off_file(list_of_coords, output_filename)
        stl_filename = os.path.join(output_dir, 'dendrite' + str(n) + '.stl')
        print('number of points: ', str(len(list_of_coords)))

        # if len(list_of_coords) < 40000:
        #     print('starting meshlab', output_filename)
        #     subprocess.call(["meshlabserver", "-i", output_filename,
        #                      "-o", stl_filename, "-s", "ballpivot.mlx"])
        # else:
        #     print('starting meshlab subsampling', output_filename)
        #     subprocess.call(["meshlabserver", "-i", output_filename,
        #                      "-o", stl_filename, "-s", "ballpivot2.mlx"])

        print('stl created', stl_filename)  # note: the meshlabserver call above is commented out, so no STL file is actually written

    return stl_filename
Example #10
def run_list_of_queries(mouse_number, mouse_project_str, sheet_name):
    """
    Run queries in parallel.

    Parameters
    -----------------
    mouse_number : int 
    mouse_project_str : str
    sheet_name : str 

    """

    output_foldername = 'results_' + sheet_name
    query_fn = 'queries/' + mouse_project_str + '_queries.json'
    data_location = '/Users/anish/Documents/yi_mice/' + \
        str(mouse_number) + 'ss_stacks/'

    hostname = socket.gethostname()
    if hostname == 'Galicia':
        data_location = '/data5TB/yi_mice/' + str(mouse_number) + 'ss_stacks'
        dapi_mask_str_base = '/data5TB/yi_mice/dapi-masks/' + \
            str(mouse_number) + 'ss_stacks'
    # NOTE: dapi_mask_str_base is only defined on the 'Galicia' host; on any
    # other host, the os.path.join() call below raises a NameError.

    print('Query Filename: ', query_fn)
    print('Data Location: ', data_location)
    print('OutputFoldername: ', output_foldername)
    print('Sheetname: ', sheet_name)

    listOfQueries = syn.loadQueriesJSON(query_fn)
    resolution = {'res_xy_nm': 100, 'res_z_nm': 70}
    region_name_base = 'F00'
    thresh = 0.9

    result_list = []
    num_workers = mp.cpu_count() - 1

    print(num_workers)
    pool = mp.Pool(num_workers)

    atet_inputs_list = []
    mask_location_str = -1
    queryID = 0
    foldernames = []
    for region_num in range(0, 4):
        region_name = region_name_base + str(region_num)
        data_region_location = os.path.join(data_location, region_name)
        dapi_mask_str = os.path.join(dapi_mask_str_base, region_name)

        for nQuery, query in enumerate(listOfQueries):
            foldername = region_name + '-Q' + str(nQuery)
            foldernames.append(foldername)
            print(foldername)

            mask_location_str = -1
            #dapi_mask_str = -1

            atet_input = {
                'query': query,
                'queryID': queryID,
                'nQuery': nQuery,
                'resolution': resolution,
                'data_region_location': data_region_location,
                'data_location': data_location,
                'output_foldername': output_foldername,
                'region_name': region_name,
                'mask_str': mask_location_str,
                'dapi_mask_str': dapi_mask_str,
                'mouse_number': mouse_number
            }
            atet_inputs_list.append(atet_input)

            queryID = queryID + 1

    # Run processes
    result_list = pool.map(sa.run_synapse_detection, atet_inputs_list)

    pool.close()
    pool.join()

    print('Get process results from the output queue')
    sorted_queryresult = sa.organize_result_lists(result_list)

    mouse_df = sa.create_synapse_df(sorted_queryresult, foldernames)
    print(mouse_df)

    fn = sheet_name + '.xlsx'
    df_list = [mouse_df]
    aa.write_dfs_to_excel(df_list, sheet_name, fn)
Example #11
def main():
    """
    Site3 Synaptograms
    """

    metadata_fn = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata_dev.json'
    metadata = syn.loadMetadata(metadata_fn)
    query_fn = metadata['querylocation']
    listOfQueries = syn.loadQueriesJSON(query_fn)
    evalargs = metadata['evalparam']

    # listOfThresholds = []
    # for query in listOfQueries:
    #     listOfThresholds.append(query['thresh'])

    # listOfQueryNumbers = list(range(0, len(listOfQueries)))
    listOfThresholds = [0.8, 0.7, 0.7]
    listOfQueryNumbers = [0, 2, 4]

    queryresult = pd.combineResultVolumes(listOfQueryNumbers, listOfThresholds,
                                          metadata, evalargs)

    data_location = metadata['datalocation']
    outputpath = '/Users/anish/Documents/Connectome/Synaptome-Duke/data/collman17/Site3Align2Stacks/synaptograms/'
    stack_list = [
        'results', 'PSD95', 'synapsin', 'VGlut1', 'GluN1', 'GABA', 'Gephyrin',
        'TdTomato'
    ]
    text_x_offset = 0
    text_y_offset = 5
    win_xy = 4
    win_z = 1

    generateResultTiffStacks(listOfQueryNumbers, listOfThresholds,
                             data_location, metadata['outputNPYlocation'])

    synaptogram_args = {
        'win_xy': win_xy,
        'win_z': win_z,
        'data_location': data_location,
        'stack_list': stack_list,
        'text_x_offset': text_x_offset,
        'text_y_offset': text_y_offset,
        'outputpath': outputpath
    }

    # Detected synapses (True Positives)
    # detected_annotations = queryresult['detected_annotations']
    # synaptogram_args['outputpath'] = os.path.join(outputpath, 'true_positive_detections')
    # for counter, synapse in enumerate(detected_annotations):
    #     synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)

    # False negatives
    missed_annotations = queryresult['missed_annotations']
    synaptogram_args['outputpath'] = os.path.join(outputpath, 'false_negative')
    for counter, synapse in enumerate(missed_annotations):
        synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)

    # False positive detections
    false_positives = queryresult['false_positives']
    synaptogram_args['outputpath'] = os.path.join(outputpath, 'false_positive')
    for synapse in false_positives:
        synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)
Example #12
def run_ab_analysis_rayleigh(synaptic_volumes, query, thresh, resolution,
                             target_antibody_name):
    """
    Run AB Analysis - special case 

    MEASURES
    - Puncta Density
    - Average punctum size
    - Standard deviation of the size
    - Synapse density
    - Target Specificity Ratio (tsr)

    Parameters
    -----------
    synaptic_volumes : dict
    query : dict
    thresh : float
    resolution : dict

    Returns
    -----------
    antibody_measure : AntibodyAnalysis()
    """

    antibody_measure = AntibodyAnalysis(query)

    # Get data volume
    antibody_measure.volume_um3 = getdatavolume(synaptic_volumes, resolution)
    print('data volume: ', antibody_measure.volume_um3)

    # Check to see if the user supplied a blob size
    if 'punctumSize' in query:
        blobsize = query['punctumSize']
        edge_win = int(np.ceil(blobsize * 1.5))
    # NOTE: if 'punctumSize' is missing, blobsize and edge_win are undefined
    # and the calls below raise a NameError.

    # Data
    presynaptic_volumes = synaptic_volumes['presynaptic']
    postsynaptic_volumes = synaptic_volumes['postsynaptic']

    # Number of slices each blob should span
    preIF_z = query['preIF_z']
    postIF_z = query['postIF_z']

    for n in range(0, len(presynaptic_volumes)):
        presynaptic_volumes[n] = syn.getProbMap_rayleigh(
            presynaptic_volumes[n])  # Step 1
        presynaptic_volumes[n] = syn.convolveVolume(presynaptic_volumes[n],
                                                    blobsize)  # Step 2
        if preIF_z[n] > 1:
            factor_vol = syn.computeFactor(presynaptic_volumes[n],
                                           int(preIF_z[n]))  # Step 3
            presynaptic_volumes[n] = presynaptic_volumes[n] * factor_vol

    # Compute single channel measurements
    antibody_measure = compute_single_channel_measurements(
        presynaptic_volumes, antibody_measure, thresh, 'presynaptic')
    print('Computed presynaptic single channel measurements')

    for n in range(0, len(postsynaptic_volumes)):
        postsynaptic_volumes[n] = syn.getProbMap_rayleigh(
            postsynaptic_volumes[n])  # Step 1
        postsynaptic_volumes[n] = syn.convolveVolume(postsynaptic_volumes[n],
                                                     blobsize)  # Step 2
        if postIF_z[n] > 1:
            factor_vol = syn.computeFactor(postsynaptic_volumes[n],
                                           int(postIF_z[n]))  # Step 3
            postsynaptic_volumes[n] = postsynaptic_volumes[n] * factor_vol

    # Compute single channel measurements
    antibody_measure = compute_single_channel_measurements(
        postsynaptic_volumes, antibody_measure, thresh, 'postsynaptic')
    print('Computed postsynaptic single channel measurements')

    if len(postsynaptic_volumes) == 0:
        resultVol = syn.combinePrePostVolumes(presynaptic_volumes,
                                              postsynaptic_volumes, edge_win,
                                              blobsize)
    else:
        resultVol = syn.combinePrePostVolumes(postsynaptic_volumes,
                                              presynaptic_volumes, edge_win,
                                              blobsize)

    # Compute whole statistics
    label_vol = measure.label(resultVol > thresh)
    stats = measure.regionprops(label_vol)
    antibody_measure.synapse_density = len(stats) / antibody_measure.volume_um3
    antibody_measure.synapse_count = len(stats)

    antibody_measure = calculuate_target_ratio(antibody_measure,
                                               target_antibody_name)

    return antibody_measure
Example #13
def run_SACT(synaptic_volumes, query, thresh, resolution,
             target_antibody_name):
    """
    Run SACT (Synaptic Antibody Characterization Tool).

    MEASURES
    - Puncta Density
    - Average punctum size
    - Standard deviation of the size
    - Synapse density
    - Target Specificity Ratio (tsr)
    - Raw data mean/std

    Parameters
    -----------
    synaptic_volumes : dict
        has two keys, 'presynaptic' and 'postsynaptic'; each contains a list of volumes
    query : dict
    thresh : float
    resolution : dict

    Returns
    -----------
    antibody_measure : AntibodyAnalysis()
    """

    antibody_measure = AntibodyAnalysis(query)

    # Get data volume
    antibody_measure.volume_um3 = getdatavolume(synaptic_volumes, resolution)
    print('data volume: ', antibody_measure.volume_um3, 'um3')

    # Check to see if the user supplied a blob size
    if 'punctumSize' in query:
        blobsize = query['punctumSize']
        edge_win = int(np.ceil(blobsize * 1.5))
    # NOTE: if 'punctumSize' is missing, blobsize and edge_win are undefined
    # and the calls below raise a NameError.

    # Data
    presynaptic_volumes = synaptic_volumes['presynaptic']
    postsynaptic_volumes = synaptic_volumes['postsynaptic']

    # Number of slices each blob should span
    preIF_z = query['preIF_z']
    postIF_z = query['postIF_z']

    # Compute raw mean and standard deviation
    antibody_measure = compute_raw_measures(presynaptic_volumes,
                                            antibody_measure, 'presynaptic')

    # SNR test
    raw_presynaptic_volumes = []
    for vol in presynaptic_volumes:
        raw_presynaptic_volumes.append(np.copy(vol))

    for n in range(0, len(presynaptic_volumes)):
        presynaptic_volumes[n] = syn.getProbMap(
            presynaptic_volumes[n])  # Step 1
        presynaptic_volumes[n] = syn.convolveVolume(presynaptic_volumes[n],
                                                    blobsize)  # Step 2
        if preIF_z[n] > 1:
            factor_vol = syn.computeFactor(presynaptic_volumes[n],
                                           int(preIF_z[n]))  # Step 3
            presynaptic_volumes[n] = presynaptic_volumes[n] * factor_vol

    # Compute single channel measurements
    antibody_measure = compute_single_channel_measurements(
        presynaptic_volumes, antibody_measure, thresh, 'presynaptic')

    # SNR test
    antibody_measure = compute_SNR_synapticside(raw_presynaptic_volumes,
                                                presynaptic_volumes, thresh,
                                                antibody_measure,
                                                'presynaptic')

    print('Computed presynaptic single channel measurements')

    # Compute raw mean and standard deviation
    antibody_measure = compute_raw_measures(postsynaptic_volumes,
                                            antibody_measure, 'postsynaptic')

    # SNR test
    raw_postsynaptic_volumes = []
    for vol in postsynaptic_volumes:
        raw_postsynaptic_volumes.append(np.copy(vol))

    for n in range(0, len(postsynaptic_volumes)):
        postsynaptic_volumes[n] = syn.getProbMap(
            postsynaptic_volumes[n])  # Step 1
        postsynaptic_volumes[n] = syn.convolveVolume(postsynaptic_volumes[n],
                                                     blobsize)  # Step 2
        if postIF_z[n] > 1:
            factor_vol = syn.computeFactor(postsynaptic_volumes[n],
                                           int(postIF_z[n]))  # Step 3
            postsynaptic_volumes[n] = postsynaptic_volumes[n] * factor_vol

    # Compute single channel measurements
    antibody_measure = compute_single_channel_measurements(
        postsynaptic_volumes, antibody_measure, thresh, 'postsynaptic')

    # SNR test
    antibody_measure = compute_SNR_synapticside(raw_postsynaptic_volumes,
                                                postsynaptic_volumes, thresh,
                                                antibody_measure,
                                                'postsynaptic')
    print('Computed postsynaptic single channel measurements')

    #"""
    if len(postsynaptic_volumes) == 0:
        resultVol = syn.combinePrePostVolumes(presynaptic_volumes,
                                              postsynaptic_volumes, edge_win,
                                              blobsize)
    else:
        resultVol = syn.combinePrePostVolumes(postsynaptic_volumes,
                                              presynaptic_volumes, edge_win,
                                              blobsize)

    # Compute whole statistics
    label_vol = measure.label(resultVol > thresh)
    stats = measure.regionprops(label_vol)
    antibody_measure.synapse_density = len(stats) / antibody_measure.volume_um3
    antibody_measure.synapse_count = len(stats)

    antibody_measure = calculuate_target_ratio(antibody_measure,
                                               target_antibody_name)
    #"""
    return antibody_measure
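A hypothetical invocation of run_SACT, following the data-loading pattern of Example #15; the query file, region path, and antibody name are placeholders.

# Hypothetical usage, mirroring Example #15's setup (illustration only).
query = syn.loadQueriesJSON('queries/example_queries.json')[0]
synaptic_volumes = da.load_tiff_from_query(query, '/path/to/region')
resolution = {'res_xy_nm': 100, 'res_z_nm': 70}
antibody_measure = run_SACT(synaptic_volumes, query, thresh=0.9,
                            resolution=resolution,
                            target_antibody_name='2ss-PSD95')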
Example #14
def main():
    """
    run site 3 synapse detection
    """
    # Load metadata
    metadata_fn = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata.json'
    #metadata_fn = 'site3_metadata_TdTomato.json'
    metadata = syn.loadMetadata(metadata_fn)

    datalocation = metadata['datalocation']
    outputNPYlocation = metadata['outputNPYlocation']
    query_fn = metadata['querylocation']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(query_fn)
    print("Number of Queries: ", len(listOfQueries))

    for n in range(0, len(listOfQueries)):
        query = listOfQueries[n]
        print(query)

        # Load the data
        synapticVolumes = da.loadTiffSeriesFromQuery(query, datalocation)

        # Run Synapse Detection
        # Takes ~5 minutes to run
        resultVol = syn.getSynapseDetections(synapticVolumes, query)

        # Save the probability map to file, if you want
        syn.saveresultvol(resultVol, outputNPYlocation, 'resultVol', n)

        # Save the thresholded results as annotation objects
        # in a json file
        # pd.probMapToJSON(resultVol, metadata, query, n)
    """
    Evaluation of site3 synapse detection results
    """

    # Load metadata
    metadataFN = 'site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)
    outputJSONlocation = metadata['outputJSONlocation']

    queryFN = metadata['querylocation']
    evalparam = metadata['evalparam']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)
    listOfThresholds = []
    listOfThresholds_to_text = []
    listOfQueries_to_text = []
    listofevals = []
    thresh_list = [0.7, 0.8, 0.9]
    # Evaluate each query individually
    for n, query in enumerate(listOfQueries):
        listOfThresholds.append(query['thresh'])

        for thresh in thresh_list:
            listOfThresholds_to_text.append(thresh)
            query['thresh'] = thresh
            print(query)
            listOfQueries_to_text.append(query.copy())  # copy: the same dict is mutated on each pass
            queryresult = pd.combineResultVolumes([n], [thresh], metadata,
                                                  evalparam)
            listofevals.append(queryresult)

    pd.printEvalToText(listofevals, listOfQueries_to_text,
                       listOfThresholds_to_text)

    # Combine Queries
    evaluation_parameters = metadata['evalparam']

    pd.combineResultVolumes(list(range(0, len(listOfQueries))),
                            listOfThresholds, metadata, evaluation_parameters)
Example #15
def run_blob_synapse(mouse_number, mouse_project_str, base_query_num,
                     channel_name):
    """
    Blob Synapse Ratio. Runs SACT for FXS data.
    Only runs on Galicia (data_location is defined only for that host).
    """

    query_fn = 'queries/' + mouse_project_str + '_queries.json'

    hostname = socket.gethostname()
    if hostname == 'Galicia':
        data_location = '/data5TB/yi_mice/' + str(mouse_number) + 'ss_stacks'
        dapi_mask_str_base = '/data5TB/yi_mice/dapi-masks/' + \
            str(mouse_number) + 'ss_stacks'

    listOfQueries = syn.loadQueriesJSON(query_fn)

    resolution = {'res_xy_nm': 100, 'res_z_nm': 70}
    region_name_base = 'F00'
    thresh = 0.9

    mask_location_str = -1  # no mask specified
    foldernames = []
    measure_list = []
    target_filenames = []
    conjugate_filenames = []

    for region_num in range(0, 4):
        region_name = region_name_base + str(region_num)
        data_region_location = os.path.join(data_location, region_name)

        query = listOfQueries[base_query_num]

        query_number = base_query_num + 12 * region_num

        foldername = region_name + '-Q' + str(base_query_num)
        foldernames.append(foldername)
        conjugate_filenames.append('Query' + str(base_query_num))

        # Load the data
        synaptic_volumes = da.load_tiff_from_query(query, data_region_location)

        # Load DAPI mask
        dapi_mask_str = os.path.join(dapi_mask_str_base, region_name)
        dapi_mask_fn = os.path.join(dapi_mask_str,
                                    str(mouse_number) + 'ss-DAPI-mask.tiff')
        dapi_mask = da.imreadtiff(dapi_mask_fn)

        # Mask data
        dapi_mask = dapi_mask.astype(bool)  # np.bool was removed in NumPy >= 1.24
        combined_mask = np.logical_not(dapi_mask)  # keep portions without dapi
        synaptic_volumes = sa.mask_synaptic_volumes(synaptic_volumes,
                                                    combined_mask)

        volume_um3 = sa.get_masked_volume(synaptic_volumes, combined_mask,
                                          resolution)
        print(volume_um3)

        target_antibody_name = str(mouse_number) + channel_name
        target_filenames.append(target_antibody_name)
        result_location = os.path.join(
            data_location, 'results_' + str(mouse_number) + 'ss_fragX',
            region_name, 'query_' + str(query_number) + '.npy')

        antibody_measure = run_SACT_FXS(synaptic_volumes, query, thresh,
                                        resolution, target_antibody_name,
                                        result_location, volume_um3)

        measure_list.append(antibody_measure)

    mouse_df = aa.create_df(measure_list, foldernames, target_filenames,
                            conjugate_filenames)

    return mouse_df