Example #1
def main():
    # The 2D blob size in x and y, in pixels
    punctumSize = 2
    # Threshold associated with query (optional)
    thresh = 0.7

    listOfQueries = []

    ## QUERY 1
    preIF = ['synapsin_1st.tif']  # corresponds to the file name
    preIF_z = [2]
    postIF = ['PSD95m_1st.tif']
    postIF_z = [2]

    query = {
        'preIF': preIF,
        'preIF_z': preIF_z,
        'postIF': postIF,
        'postIF_z': postIF_z,
        'punctumSize': punctumSize,
        'thresh': thresh
    }
    listOfQueries.append(query)
    data = {'listOfQueries': listOfQueries}
    fn = 'example_queries.json'
    da.writeJSONFile(fn, data)

    # Load Query File
    querylist = syn.loadQueriesJSON(fn)
    print(len(querylist))
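
These snippets are shown without their import statements. A minimal sketch of what they appear to assume follows; the at_synapse_detection module paths are inferred from the aliases (syn, da, pd, sa, aa) used in the code and are an assumption, not something shown on this page.

# Assumed imports for these examples (module paths inferred from usage, not verified)
import os
import socket
import multiprocessing as mp
import numpy as np
from skimage import measure  # measure.label / measure.regionprops in Example #4
from at_synapse_detection import dataAccess as da
from at_synapse_detection import SynapseDetection as syn
from at_synapse_detection import processDetections as pd  # pd is not pandas here
from at_synapse_detection import SynapseAnalysis as sa
from at_synapse_detection import antibodyAnalysis as aa
from at_synapse_detection import synaptogram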
Example #2
def main():

    # Example use case of processing detections
    # Load probability map
    metadataFN = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)

    queryFN = metadata['querylocation']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)

    for n in range(0, len(listOfQueries)):
        fn = os.path.join(metadata['datalocation'], 'resultVol')
        fn = fn + str(n) + '.npy'

        resultVol = np.load(fn)
        print(fn)

        pd.probMapToJSON(resultVol, metadata, listOfQueries[n], n)
Example #3
def main():
    """
    Evaluation of site3 synapse detection results
    """

    # Load metadata
    metadataFN = 'site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)
    outputJSONlocation = metadata['outputJSONlocation']

    queryFN = metadata['querylocation']
    evalparam = metadata['evalparam']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)
    listOfThresholds = []
    listOfThresholds_to_text = []
    listOfQueries_to_text = []
    listofevals = []
    thresh_list = [0.7, 0.8, 0.9]
    # Evaluate each query individually
    for n, query in enumerate(listOfQueries):
        listOfThresholds.append(query['thresh'])

        for thresh in thresh_list:
            listOfThresholds_to_text.append(thresh)
            query['thresh'] = thresh
            print(query)
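            # NOTE: the same query dict object is appended on every pass, so all
            # appended entries end up reflecting the final thresh; append
            # dict(query) instead if separate snapshots are intended.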
            listOfQueries_to_text.append(query)
            queryresult = pd.combineResultVolumes([n], [thresh], metadata,
                                                  evalparam)
            listofevals.append(queryresult)

    pd.printEvalToText(listofevals, listOfQueries_to_text,
                       listOfThresholds_to_text)

    # Combine Queries
    evaluation_parameters = metadata['evalparam']

    pd.combineResultVolumes(list(range(0, len(listOfQueries))),
                            listOfThresholds, metadata, evaluation_parameters)
Example #4
def main():

    # Example use case of the synapse detection pipeline
    # Location of Queries
    queryFN = 'example_queries.json'

    # List of Queries
    list_of_queries = syn.loadQueriesJSON(queryFN)

    # Test the first query
    query = list_of_queries[0]

    # Load the data
    synaptic_volumes = da.load_tiff_from_query(query)

    # Run Synapse Detection
    # Takes ~5 minutes to run
    result_vol = syn.getSynapseDetections(synaptic_volumes, query)

    # Verify Output
    label_vol = measure.label(result_vol > 0.9)
    stats = measure.regionprops(label_vol)
    print(len(stats))  # output should be 5440
Example #5
def run_list_of_queries(mouse_number, mouse_project_str, sheet_name):
    """
    run queries in a parallel manner

    Parameters
    -----------------
    mouse_number : int 
    mouse_project_str : str
    sheet_name : str 

    """

    output_foldername = 'results_' + sheet_name
    query_fn = 'queries/' + mouse_project_str + '_queries.json'
    data_location = '/Users/anish/Documents/yi_mice/' + \
        str(mouse_number) + 'ss_stacks/'

    hostname = socket.gethostname()
    if hostname == 'Galicia':
        data_location = '/data5TB/yi_mice/' + str(mouse_number) + 'ss_stacks'
        dapi_mask_str_base = '/data5TB/yi_mice/dapi-masks/' + \
            str(mouse_number) + 'ss_stacks'

    print('Query Filename: ', query_fn)
    print('Data Location: ', data_location)
    print('OutputFoldername: ', output_foldername)
    print('Sheetname: ', sheet_name)

    listOfQueries = syn.loadQueriesJSON(query_fn)
    resolution = {'res_xy_nm': 100, 'res_z_nm': 70}
    region_name_base = 'F00'
    thresh = 0.9

    result_list = []
    num_workers = mp.cpu_count() - 1

    print(num_workers)
    pool = mp.Pool(num_workers)

    atet_inputs_list = []
    mask_location_str = -1
    queryID = 0
    foldernames = []
    for region_num in range(0, 4):
        region_name = region_name_base + str(region_num)
        data_region_location = os.path.join(data_location, region_name)
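        # NOTE: dapi_mask_str_base is only defined in the 'Galicia' branch above,
        # so the next line raises NameError on other hosts.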
        dapi_mask_str = os.path.join(dapi_mask_str_base, region_name)

        for nQuery, query in enumerate(listOfQueries):
            foldername = region_name + '-Q' + str(nQuery)
            foldernames.append(foldername)
            print(foldername)

            mask_location_str = -1
            #dapi_mask_str = -1

            atet_input = {
                'query': query,
                'queryID': queryID,
                'nQuery': nQuery,
                'resolution': resolution,
                'data_region_location': data_region_location,
                'data_location': data_location,
                'output_foldername': output_foldername,
                'region_name': region_name,
                'mask_str': mask_location_str,
                'dapi_mask_str': dapi_mask_str,
                'mouse_number': mouse_number
            }
            atet_inputs_list.append(atet_input)

            queryID = queryID + 1

    # Run processes
    result_list = pool.map(sa.run_synapse_detection, atet_inputs_list)

    pool.close()
    pool.join()

    print('Get process results from the output queue')
    sorted_queryresult = sa.organize_result_lists(result_list)

    mouse_df = sa.create_synapse_df(sorted_queryresult, foldernames)
    print(mouse_df)

    fn = sheet_name + '.xlsx'
    df_list = [mouse_df]
    aa.write_dfs_to_excel(df_list, sheet_name, fn)
Example #6
def main():
    """
    Site3 Synaptograms
    """

    metadata_fn = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata_dev.json'
    metadata = syn.loadMetadata(metadata_fn)
    query_fn = metadata['querylocation']
    listOfQueries = syn.loadQueriesJSON(query_fn)
    evalargs = metadata['evalparam']

    # listOfThresholds = []
    # for query in listOfQueries:
    #     listOfThresholds.append(query['thresh'])

    # listOfQueryNumbers = list(range(0, len(listOfQueries)))
    listOfThresholds = [0.8, 0.7, 0.7]
    listOfQueryNumbers = [0, 2, 4]

    queryresult = pd.combineResultVolumes(listOfQueryNumbers, listOfThresholds,
                                          metadata, evalargs)

    data_location = metadata['datalocation']
    outputpath = '/Users/anish/Documents/Connectome/Synaptome-Duke/data/collman17/Site3Align2Stacks/synaptograms/'
    stack_list = [
        'results', 'PSD95', 'synapsin', 'VGlut1', 'GluN1', 'GABA', 'Gephyrin',
        'TdTomato'
    ]
    text_x_offset = 0
    text_y_offset = 5
    win_xy = 4
    win_z = 1

    generateResultTiffStacks(listOfQueryNumbers, listOfThresholds,
                             data_location, metadata['outputNPYlocation'])

    synaptogram_args = {
        'win_xy': win_xy,
        'win_z': win_z,
        'data_location': data_location,
        'stack_list': stack_list,
        'text_x_offset': text_x_offset,
        'text_y_offset': text_y_offset,
        'outputpath': outputpath
    }

    # Detected synapses (True Positives)
    # detected_annotations = queryresult['detected_annotations']
    # synaptogram_args['outputpath'] = os.path.join(outputpath, 'true_positive_detections')
    # for counter, synapse in enumerate(detected_annotations):
    #     synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)

    # False negatives
    missed_annotations = queryresult['missed_annotations']
    synaptogram_args['outputpath'] = os.path.join(outputpath, 'false_negative')
    for counter, synapse in enumerate(missed_annotations):
        synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)

    # False positive detections
    false_positives = queryresult['false_positives']
    synaptogram_args['outputpath'] = os.path.join(outputpath, 'false_positive')
    for synapse in false_positives:
        synaptogram.synapseAnnoToSynaptogram(synapse, synaptogram_args)
Example #7
def main():
    """
    run site 3 synapse detection
    """
    # Load metadata
    metadata_fn = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata.json'
    #metadata_fn = 'site3_metadata_TdTomato.json'
    metadata = syn.loadMetadata(metadata_fn)

    datalocation = metadata['datalocation']
    outputNPYlocation = metadata['outputNPYlocation']
    query_fn = metadata['querylocation']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(query_fn)
    print("Number of Queries: ", len(listOfQueries))

    for n in range(0, len(listOfQueries)):
        query = listOfQueries[n]
        print(query)

        # Load the data
        synapticVolumes = da.loadTiffSeriesFromQuery(query, datalocation)

        # Run Synapse Detection
        # Takes ~5 minutes to run
        resultVol = syn.getSynapseDetections(synapticVolumes, query)

        # Save the probability map to file, if you want
        syn.saveresultvol(resultVol, outputNPYlocation, 'resultVol', n)

        # Save the thresholded results as annotation objects
        # in a json file
        # pd.probMapToJSON(resultVol, metadata, query, n)
    """
    Evaluation of site3 synapse detection results
    """

    # Load metadata
    metadataFN = 'site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)
    outputJSONlocation = metadata['outputJSONlocation']

    queryFN = metadata['querylocation']
    evalparam = metadata['evalparam']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)
    listOfThresholds = []
    listOfThresholds_to_text = []
    listOfQueries_to_text = []
    listofevals = []
    thresh_list = [0.7, 0.8, 0.9]
    # Evaluate each query individually
    for n, query in enumerate(listOfQueries):
        listOfThresholds.append(query['thresh'])

        for thresh in thresh_list:
            listOfThresholds_to_text.append(thresh)
            query['thresh'] = thresh
            print(query)
            listOfQueries_to_text.append(query)
            queryresult = pd.combineResultVolumes([n], [thresh], metadata,
                                                  evalparam)
            listofevals.append(queryresult)

    pd.printEvalToText(listofevals, listOfQueries_to_text,
                       listOfThresholds_to_text)

    # Combine Queries
    evaluation_parameters = metadata['evalparam']

    pd.combineResultVolumes(list(range(0, len(listOfQueries))),
                            listOfThresholds, metadata, evaluation_parameters)
Example #8
def run_blob_synapse(mouse_number, mouse_project_str, base_query_num,
                     channel_name):
    """
    Blob Synapse Ratio.  Run SACT for FXS data
    Only runs on Galicia
    """

    query_fn = 'queries/' + mouse_project_str + '_queries.json'

    hostname = socket.gethostname()
    if hostname == 'Galicia':
        data_location = '/data5TB/yi_mice/' + str(mouse_number) + 'ss_stacks'
        dapi_mask_str_base = '/data5TB/yi_mice/dapi-masks/' + \
            str(mouse_number) + 'ss_stacks'

    listOfQueries = syn.loadQueriesJSON(query_fn)

    resolution = {'res_xy_nm': 100, 'res_z_nm': 70}
    region_name_base = 'F00'
    thresh = 0.9

    mask_location_str = -1  # no mask specified
    foldernames = []
    measure_list = []
    target_filenames = []
    conjugate_filenames = []

    for region_num in range(0, 4):
        region_name = region_name_base + str(region_num)
        data_region_location = os.path.join(data_location, region_name)

        query = listOfQueries[base_query_num]

        query_number = base_query_num + 12 * region_num

        foldername = region_name + '-Q' + str(base_query_num)
        foldernames.append(foldername)
        conjugate_filenames.append('Query' + str(base_query_num))

        # Load the data
        synaptic_volumes = da.load_tiff_from_query(query, data_region_location)

        # Load DAPI mask
        dapi_mask_str = os.path.join(dapi_mask_str_base, region_name)
        dapi_mask_fn = os.path.join(dapi_mask_str,
                                    str(mouse_number) + 'ss-DAPI-mask.tiff')
        dapi_mask = da.imreadtiff(dapi_mask_fn)

        # Mask data
        dapi_mask = dapi_mask.astype(bool)  # np.bool was removed in NumPy 1.24+
        combined_mask = np.logical_not(dapi_mask)  # keep portions without dapi
        synaptic_volumes = sa.mask_synaptic_volumes(synaptic_volumes,
                                                    combined_mask)

        volume_um3 = sa.get_masked_volume(synaptic_volumes, combined_mask,
                                          resolution)
        print(volume_um3)

        target_antibody_name = str(mouse_number) + channel_name
        target_filenames.append(target_antibody_name)
        result_location = os.path.join(
            data_location, 'results_' + str(mouse_number) + 'ss_fragX',
            region_name, 'query_' + str(query_number) + '.npy')

        antibody_measure = run_SACT_FXS(synaptic_volumes, query, thresh,
                                        resolution, target_antibody_name,
                                        result_location, volume_um3)

        measure_list.append(antibody_measure)

    mouse_df = aa.create_df(measure_list, foldernames, target_filenames,
                            conjugate_filenames)

    return mouse_df