def main():
    # Example use case of the synapse detection pipeline
    # Location of Queries
    queryFN = 'example_queries.json'

    # List of Queries
    list_of_queries = syn.loadQueriesJSON(queryFN)

    # Test the first query
    query = list_of_queries[0]

    # Load the data
    synaptic_volumes = da.load_tiff_from_query(query)

    # Run Synapse Detection
    # Takes ~5 minutes to run
    result_vol = syn.getSynapseDetections(synaptic_volumes, query)

    # Verify Output
    label_vol = measure.label(result_vol > 0.9)
    stats = measure.regionprops(label_vol)
    print(len(stats))  # output should be 5440
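The example above leaves out its imports and entry point. A minimal sketch of what it assumes, using the at_synapse_detection package layout for the `da`/`syn` aliases (adjust the import paths to your install):

from skimage import measure  # connected-component labeling of the probability map
from at_synapse_detection import dataAccess as da  # assumed module path
from at_synapse_detection import SynapseDetection as syn  # assumed module path

if __name__ == '__main__':
    main()

Note that `measure.label(result_vol > 0.9)` thresholds the probability map at 0.9 and labels the connected components, so `len(stats)` is the number of putative synapse detections.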
def run_synapse_detection(atet_input):
    """
    Run synapse detection and result evaluation.
    The parameter handling needs to be rethought.

    Parameters
    ----------
    atet_input : dict
        Bundle of detection inputs (keys unpacked below).

    Returns
    -------
    output_dict : dict
        Query, query ID, and computed measurements.
    """
    query = atet_input['query']
    queryID = atet_input['queryID']
    nQuery = atet_input['nQuery']
    resolution = atet_input['resolution']
    data_location = atet_input['data_location']
    data_region_location = atet_input['data_region_location']
    output_foldername = atet_input['output_foldername']
    region_name = atet_input['region_name']
    layer_mask_str = atet_input['mask_str']
    dapi_mask_str = atet_input['dapi_mask_str']
    mouse_number = atet_input['mouse_number']

    # Load the data
    synaptic_volumes = da.load_tiff_from_query(query, data_region_location)

    # Load layer mask
    if layer_mask_str != -1:
        layer_mask = Image.open(layer_mask_str)
        layer_mask = np.array(layer_mask)

    # Load DAPI mask
    dapi_mask_fn = os.path.join(dapi_mask_str, str(mouse_number) + 'ss-DAPI-mask.tiff')
    dapi_mask = da.imreadtiff(dapi_mask_fn)

    # Merge DAPI mask and Layer 4 mask
    if layer_mask_str != -1:
        combined_mask = merge_DAPI_L4_masks(layer_mask, dapi_mask)
    else:
        dapi_mask = dapi_mask.astype(bool)  # np.bool is removed in NumPy >= 1.24
        combined_mask = np.logical_not(dapi_mask)  # keep portions without DAPI

    # Mask data
    synaptic_volumes = mask_synaptic_volumes(synaptic_volumes, combined_mask)

    volume_um3 = get_masked_volume(synaptic_volumes, combined_mask, resolution)
    print(volume_um3)

    # Run Synapse Detection
    print('running synapse detection')
    resultvol = syn.getSynapseDetections(synaptic_volumes, query)

    # Save the probability map to file, if you want
    outputNPYlocation = os.path.join(data_location, output_foldername, region_name)
    syn.saveresultvol(resultvol, outputNPYlocation, 'query_', queryID)

    thresh = 0.9
    queryresult = compute_measurements(resultvol, query, volume_um3, thresh)

    output_dict = {'queryID': queryID, 'query': query, 'queryresult': queryresult}
    return output_dict
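Because run_synapse_detection takes a single dict, it can be mapped over something like multiprocessing.Pool. A hedged usage sketch: the keys are exactly the ones the function unpacks above, while every value here is a hypothetical placeholder.

query = syn.loadQueriesJSON('example_queries.json')[0]  # as in the first example

atet_input = {
    'query': query,
    'queryID': 0,
    'nQuery': 1,
    'resolution': resolution,              # voxel-size info consumed by get_masked_volume
    'data_location': '/path/to/data',      # placeholder path
    'data_region_location': '/path/to/data/region',
    'output_foldername': 'results',
    'region_name': 'F000',                 # placeholder region name
    'mask_str': -1,                        # -1 skips the layer-mask branch
    'dapi_mask_str': '/path/to/dapi_masks',
    'mouse_number': 2,                     # placeholder
}
output = run_synapse_detection(atet_input)
print(output['queryID'], output['queryresult'])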
def main():
    """ Run site 3 synapse detection """
    # Load metadata
    metadata_fn = '/Users/anish/Documents/Connectome/SynapseAnalysis/data/M247514_Rorb_1/Site3Align2/site3_metadata.json'
    # metadata_fn = 'site3_metadata_TdTomato.json'
    metadata = syn.loadMetadata(metadata_fn)

    datalocation = metadata['datalocation']
    outputNPYlocation = metadata['outputNPYlocation']
    query_fn = metadata['querylocation']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(query_fn)
    print("Number of Queries: ", len(listOfQueries))

    for n in range(0, len(listOfQueries)):
        query = listOfQueries[n]
        print(query)

        # Load the data
        synapticVolumes = da.loadTiffSeriesFromQuery(query, datalocation)

        # Run Synapse Detection
        # Takes ~5 minutes to run
        resultVol = syn.getSynapseDetections(synapticVolumes, query)

        # Save the probability map to file, if you want
        syn.saveresultvol(resultVol, outputNPYlocation, 'resultVol', n)

        # Save the thresholded results as annotation objects in a JSON file
        # pd.probMapToJSON(resultVol, metadata, query, n)

    # Evaluation of site3 synapse detection results
    # Load metadata
    metadataFN = 'site3_metadata.json'
    metadata = syn.loadMetadata(metadataFN)
    outputJSONlocation = metadata['outputJSONlocation']
    queryFN = metadata['querylocation']
    evalparam = metadata['evalparam']

    # List of Queries
    listOfQueries = syn.loadQueriesJSON(queryFN)
    listOfThresholds = []
    listOfThresholds_to_text = []
    listOfQueries_to_text = []
    listofevals = []
    thresh_list = [0.7, 0.8, 0.9]

    # Evaluate each query individually, sweeping over thresholds
    for n, query in enumerate(listOfQueries):
        listOfThresholds.append(query['thresh'])

        for thresh in thresh_list:
            listOfThresholds_to_text.append(thresh)
            query['thresh'] = thresh
            print(query)
            # Append a copy; appending the dict itself would leave every
            # logged entry aliased to the final threshold in thresh_list
            listOfQueries_to_text.append(dict(query))
            queryresult = pd.combineResultVolumes([n], [thresh], metadata, evalparam)
            listofevals.append(queryresult)

    pd.printEvalToText(listofevals, listOfQueries_to_text, listOfThresholds_to_text)

    # Combine Queries
    evaluation_parameters = metadata['evalparam']
    pd.combineResultVolumes(list(range(0, len(listOfQueries))),
                            listOfThresholds, metadata, evaluation_parameters)
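All three drivers iterate over query dicts from loadQueriesJSON. For orientation, a hedged sketch of one query entry; the field names and values below are illustrative of how the pipeline pairs pre- and postsynaptic channels, and only the 'thresh' key appears in the code above:

query = {
    'preIF': ['synapsin.tif'],   # presynaptic channel volume(s), hypothetical file name
    'preIF_z': [2],              # slices a presynaptic punctum must span
    'postIF': ['PSD95.tif'],     # postsynaptic channel volume(s), hypothetical file name
    'postIF_z': [2],
    'punctumSize': 2,            # expected punctum size, in pixels
    'thresh': 0.9,               # probability threshold read by the evaluation loop
}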