def is_point_partof_met(point):
    """Return True if the given absolute point belongs to any detected metastasis."""
    point_is_partof_met = False
    point_as_list = [point]
    for met in mets_of_all_patches:
        met_center = met['location']['center']
        distance = blobanalysis.point_dist(met_center, point)
        # cheap pre-filter: skip metastases whose center is farther than ROI_width away
        if distance > ROI_width:
            continue
        points_of_met_rel = met['points']  # points of met, relative to its patch
        patchID_of_patch_of_met = met['patch_id']
        patch_of_met = dc.filter_dicts(whole_scan_metadata['patches'], 'id',
                                       patchID_of_patch_of_met)[0]
        offset_of_patch_of_met = patch_of_met['offset']
        points_of_met = np.add(points_of_met_rel, offset_of_patch_of_met).tolist()  # absolute positions
        point_is_partof_met = blobanalysis.test_overlap(point_as_list, points_of_met)
        if point_is_partof_met:
            print('Point ' + str(point) + ' is part of met ' + str(met['global_id']))
            break
    return point_is_partof_met
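# Usage sketch (assumption, not part of the original script): query a single absolute
# coordinate, given in the same axis order as the metastases' 'location' entries.
# The coordinate below is purely hypothetical; the function is defined for
# illustration only and is not called anywhere in this script.
def demo_point_query():
    example_point = [1200, 800, 450]  # hypothetical absolute voxel coordinate
    if is_point_partof_met(example_point):
        print('Point belongs to a detected metastasis.')
    else:
        print('Point does not belong to any detected metastasis.')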
def get_patch_ID_given_offset(offset):
    offset = np.array(offset, dtype=np.int64)
    patches = dc.filter_dicts(whole_scan_metadata['patches'], 'offset', offset)
    patch = patches[0]
    patch_ID = patch['id']
    return patch_ID
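# Usage sketch (assumption): offsets are taken from whole_scan_metadata['patches']
# itself, so a patch record can be looked up again by its own offset. Defined for
# illustration only; not called anywhere in this script.
def demo_offset_lookup(patch):
    patch_ID = get_patch_ID_given_offset(patch['offset'])
    print('Offset', patch['offset'], 'belongs to patch', patch_ID)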
def get_whole_mouse_thumbnail(candidate_ID):
    """Return the whole-mouse thumbnail slice corresponding to the candidate's patch z-step."""
    current_metastasis = get_current_metastasis(candidate_ID)
    patch_ID = current_metastasis['patch_id']
    patches = region['patches']
    patch = dataconversions.filter_dicts(patches, 'id', patch_ID)[0]
    patchstep = patch['patchstep']
    z_patchstep = patchstep[2]
    whole_mouse_thumbnail = whole_mouse_thumbnails[:, :, z_patchstep]
    return whole_mouse_thumbnail
def get_lower_left_corner(candidate_ID):
    """Return the lower-left corner (in thumbnail pixels) of the candidate's patch footprint."""
    current_metastasis = get_current_metastasis(candidate_ID)
    patch_ID = current_metastasis['patch_id']
    patches = region['patches']
    patch = dataconversions.filter_dicts(patches, 'id', patch_ID)[0]
    patchstep = patch['patchstep']
    lower_left_x = patchstep[1] * 30  # scale patchstep to thumbnail pixels (factor 30)
    lower_left_y = patchstep[0] * 30
    lower_left_corner = (lower_left_x, lower_left_y)
    return lower_left_corner
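# Usage sketch (assumption): combine the two helpers above to mark a candidate's patch
# on the whole-mouse thumbnail. The 30 px box edge mirrors the patchstep-to-thumbnail
# scaling hard-coded in get_lower_left_corner(); figure number 102 is arbitrary and
# chosen to avoid clashing with main_fig (num=101). Defined for illustration only.
def demo_show_candidate_on_thumbnail(candidate_ID):
    import matplotlib.patches as mpatches
    thumbnail = get_whole_mouse_thumbnail(candidate_ID)
    corner = get_lower_left_corner(candidate_ID)
    fig = plt.figure(num=102)
    ax = fig.add_subplot(111)
    ax.imshow(thumbnail, cmap='gray')
    ax.add_patch(mpatches.Rectangle(corner, 30, 30, fill=False, edgecolor='red'))
    plt.show()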
def pick_sample():
    """Interactively choose a mouse, patch ID and metastasis ID; return the sample images
    (or None if no matching samplecard exists)."""
    samplecards = filehandling.pload(DATAPATH + '/mice_metadata/' +
                                     'list_of_samplecards.pickledump')
    mice = ['H2030IC10dn573', 'IC2dn2', 'IC6dn1', 'IC6dn2', 'IC14dn1',
            'MCF7IC21dn528', 'PC14dn04595']
    print()
    print("Please choose a mouse:")
    for i, m in enumerate(mice):
        print('[', i, ']', ' ', m)
    chosen_index = input('--> ')
    mouse = mice[int(chosen_index)]
    print()
    print("What's the patch ID?")
    patch_ID = int(input("--> "))
    print()
    print("What's the metastasis ID?")
    met_ID = int(input("--> "))
    samplecards_mouse = dataconversions.filter_dicts(samplecards, 'mouse', mouse)
    samplecards_patch = dataconversions.filter_dicts(samplecards_mouse, 'patch_id', patch_ID)
    samplecards_met = dataconversions.filter_dicts(samplecards_patch, 'met_id', met_ID)
    chosen_samplecard = samplecards_met  # list of matching samplecards (may be empty)
    if chosen_samplecard:
        print()
        print('Chosen sample:')
        print(chosen_samplecard)
        print()
        sample_set = MetDataset(chosen_samplecard)
        sample_loader = DataLoader(sample_set)
        for i, (images, label) in enumerate(sample_loader):
            return images
    else:
        return None
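# Usage sketch (assumption): pick_sample() returns the image tensor of the single batch
# produced by the DataLoader, or None if no matching samplecard exists; the exact tensor
# shape depends on how MetDataset formats its items. Defined for illustration only.
def demo_preview_sample():
    images = pick_sample()
    if images is None:
        print('No samplecard found for the chosen mouse / patch / metastasis IDs.')
    else:
        print('Loaded sample tensor of shape', tuple(images.shape))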
def is_met_in_ROI(met, ROI, ROI_center, is_ambig_met=False):
    """Decide whether a metastasis overlaps a cubic ROI centered at ROI_center."""
    met_is_in_ROI = False
    met_center = met['location']['center']
    distance = blobanalysis.point_dist(met_center, ROI_center)
    met_descr_string = 'Ambig-Met' if is_ambig_met else 'Met'
    met_descr_string += ' ' + str(met['id']) + ' of Patch ' + str(met['patch_id'])
    # center within the inscribed sphere of the ROI cube --> counted as inside
    if distance < ROI_width_half:
        met_is_in_ROI = True
    # center within the circumscribed sphere (half space diagonal = sqrt(3) * half width,
    # plus tolerance) --> may or may not overlap, so check the point sets explicitly
    elif distance < (sqrt(3) * ROI_width_half + tolerance):
        points_of_met_rel = met['points']  # points of met, relative to its patch
        patchID_of_patch_of_met = met['patch_id']
        patch_of_met = dc.filter_dicts(whole_scan_metadata['patches'], 'id',
                                       patchID_of_patch_of_met)[0]
        offset_of_patch_of_met = patch_of_met['offset']
        points_of_met = np.add(points_of_met_rel, offset_of_patch_of_met).tolist()  # absolute positions
        points_of_ROI = get_points_of_ROI(ROI, ROI_center)  # points of ROI, absolute positions
        # overlap_points = blobanalysis.get_overlap(points_of_met, points_of_ROI)
        met_is_in_ROI = blobanalysis.test_overlap(points_of_met, points_of_ROI)
    # center beyond the circumscribed sphere --> definitely outside
    else:
        met_is_in_ROI = False
    if met_is_in_ROI:
        print(met_descr_string + ' is within ROI')
    return met_is_in_ROI
def load_segmentation(dataset, pid, load_partial, THR_dim=200):
    '''
    segmentation = load_segmentation(dataset, pid, load_partial, THR_dim=200)

    Not only loads the segmentation file for the desired patch, but also includes all (dim)
    metastases that belong to adjacent patches if they partially overlap with this patch.
    In those cases, only the overlapping part of the metastasis is included, i.e. this
    function may NOT BE USED to derive statistical information on the metastases, as it
    would be wrong for exactly those partial metastases. Also, if the part within the patch
    is small (less than 20% of total volume) or dim, it is only included as a "dim"
    metastasis, i.e. detecting it is not rewarded and missing it is not punished; such
    metastases have to be detected from the patch that contains the majority of their volume.

    Inputs:
        - dataset: string specifying the dataset ("F15")
        - pid: integer patch ID to be loaded
        - load_partial: boolean, whether or not to load partially overlapping blobs from
          surrounding patches
        - THR_dim: threshold to check whether partial blobs are now considered dim because
          their brighter parts lie in another patch
    '''
    seg = filehandling.pload(BASEP + 'data/' + dataset +
                             '/segmentations/segmentation_patch_' + str(pid))
    if not os.path.isfile('/mnt/C286054386053985/oschoppe/F15/patchvolume_' + str(pid) + '.nii'):
        print('WARNING: cannot load local patch files and thus cannot consider partial blobs properly.')
        print('--> function load_segmentation() will be called with load_partial=False instead')
        print('ALTERED PATHS IN p_leo2 AND ALL HELPER FUNCTIONS!')
        load_partial = False
    if load_partial:
        region = filehandling.pload(BASEP + 'data/' + dataset + '/region')
        [maxy, maxx, maxz] = np.asarray(region['partitioning']['patch_size']) - region['partitioning']['patch_overlap']
        [y0, x0, z0] = region['patches'][pid]['patchstep']
        cancervol = filehandling.readNifti('/mnt/C286054386053985/oschoppe/F15/patchvolume_' + str(pid))
        add_part_metastases = []
        add_part_dim_metastases = []
        # Go through neighboring patches whose buffer zones overlap with this patch
        for dy in [-1, 0]:
            for dx in [-1, 0]:
                for dz in [-1, 0]:
                    [y, x, z] = [y0 + dy, x0 + dx, z0 + dz]
                    if (y >= 0 and x >= 0 and z >= 0) and not (y == y0 and x == x0 and z == z0):
                        npid = dataconversions.filter_dicts(region['patches'], 'patchstep', [y, x, z])[0]['id']
                        nseg = filehandling.pload(BASEP + 'data/' + dataset +
                                                  '/segmentations/segmentation_patch_' + str(npid))
                        # check whether any metastases overlap with this patch
                        dim_metastases = [] if 'dim_metastases' not in nseg.keys() else nseg['dim_metastases']
                        for m in nseg['metastases'] + dim_metastases:
                            abs_bb = m['offset'] + m['boundingbox']
                            if (abs_bb[0] >= (-1) * dy * maxy and
                                    abs_bb[1] >= (-1) * dx * maxx and
                                    abs_bb[2] >= (-1) * dz * maxz):
                                # if so, include only THOSE points WITHIN the patch and
                                # shift them into the coordinate system of the new patch
                                patchstep_offset = np.multiply([maxy, maxx, maxz], [dy, dx, dz])
                                shifted_points = m['points'] + patchstep_offset
                                filtered_points = shifted_points[np.min(shifted_points, axis=1) >= 0]  # only take points fully within patch
                                if len(filtered_points) > 0:
                                    # characterize new partial blob
                                    m_partial = {}
                                    m_partial['id'] = 100 * npid + m['id']
                                    m_partial['points'] = filtered_points.tolist()
                                    m_partial = blobanalysis.characterize_blob(m_partial)  # ~ 0.1 s
                                    m_partial = characterize_metastasis(m_partial, cancervol, min_padding=25,
                                                                        otherblobs=seg['metastases'])  # ~ 0.01 s
                                    # add to main list if NOT dim and at least 20% within patch
                                    if (m_partial['characterization']['maxFG'] > THR_dim and
                                            m_partial['volume'] >= 0.2 * m['volume']):
                                        m_partial['INFO'] = 'Partial, but substantial metastases from adjacent patch #' + str(npid)
                                        add_part_metastases.append(m_partial)
                                    else:
                                        m_partial['evaluation']['flag_dim'] = True
                                        m_partial['INFO'] = 'Partial, but dim/very small part of metastases from adjacent patch #' + str(npid)
                                        add_part_dim_metastases.append(m_partial)
        seg['metastases'] += add_part_metastases
        if 'dim_metastases' in seg.keys():
            seg['dim_metastases'] += add_part_dim_metastases
    return seg
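# Usage sketch (assumption): load a patch segmentation including partial metastases
# reaching in from neighbouring patches. As the docstring above warns, the result must
# not be used to derive metastasis statistics. Patch ID 42 is hypothetical; defined for
# illustration only and not called anywhere in this script.
def demo_patch_summary(dataset='F15', pid=42):
    seg = load_segmentation(dataset, pid, load_partial=True)
    n_dim = len(seg['dim_metastases']) if 'dim_metastases' in seg.keys() else 0
    print('Patch', pid, 'has', len(seg['metastases']), 'metastases and', n_dim,
          'dim metastases (including partial ones from adjacent patches)')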
fileToWriteTo = filepath + filename
plt.imsave(fileToWriteTo, MIP_x, format='png')
MIP_z = np.max(ROI, 2)  # maximum intensity projection along z
axis = 'z'
filename = filename_prefix + '_' + axis + '.png'
fileToWriteTo = filepath + filename
plt.imsave(fileToWriteTo, MIP_z, format='png')


#%% Main
for mouse in mice:
    prediction = filehandling.pload(DATAPATH + '/mice_metadata/' + mouse +
                                    '/reviewed_prediction.pickledump')
    candidates = prediction['metastases']
    potential_TPs = dataconversions.filter_dicts(candidates, 'evaluation-manually_confirmed', True)
    confirmed_FPs = dataconversions.filter_dicts(candidates, 'evaluation-manually_confirmed', False)
    for potential_TP_met in potential_TPs:
        for channel in channels:
            print('\n\n')
            print('### Mouse ', mouse, ' ###')
            print('### Channel ', channel, ' ###')
            ROI = crop_ROI(potential_TP_met, mouse, channel)
            if ROI is not None:
                filename_prefix = write_ROI_to_Nifti(ROI, potential_TP_met, mouse, channel)
                write_ROI_to_PNGs(ROI, filename_prefix)
def get_current_metastasis(candidate_ID):
    current_metastasis = dataconversions.filter_dicts(TP_candidates, 'global_id', candidate_ID)[0]
    return current_metastasis
d = datetime.datetime.today()
DATE_OF_TODAY = d.strftime('%d-%m-%Y')
print("Annotator:\t", NAME_OF_ANNOTATOR)
print("Today's date:\t", DATE_OF_TODAY)
print("Mouse:\t\t", mouse)
print()

#%%
main_fig = plt.figure(num=101)
prediction = filehandling.pload(DATAPATH + '/mice_metadata/' + mouse +
                                '/reviewed_prediction.pickledump')
metastases = prediction['metastases']
TP_candidates = dataconversions.filter_dicts(metastases, 'evaluation-manually_confirmed', True)
number_of_candidates = len(TP_candidates)
candidate_IDs = []
for TP_candidate in TP_candidates:
    candidate_IDs.append(TP_candidate['global_id'])
all_candidate_IDs = copy.deepcopy(candidate_IDs)
print('All candidate IDs: ', all_candidate_IDs)
region = filehandling.pload(DATAPATH + '/mice_metadata/' + mouse + '/region.pickledump')
whole_mouse_thumbnails = region['thumbnails']['MaxProjections_Z']

#%%
#%% Step 2) For each metastasis, add all relevant meta information
for m, metastasis in enumerate(segmentation['metastases']):
    # two passes over the metastases: the second loop needs the global locations
    # of ALL metastases, which are computed in this first loop
    print('Loop 1/2: Adding information for metastasis #' + str(metastasis['global_id']))
    # add global location information to metastasis
    segmentation['metastases'][m]['location'] = {}
    p_offset = np.asarray(region['patches'][metastasis['patch_id']]['offset'])
    segmentation['metastases'][m]['location']['offset'] = (p_offset + metastasis['offset']).tolist()
    segmentation['metastases'][m]['location']['center'] = (p_offset + metastasis['offset'] + metastasis['CoM']).tolist()
    # Add info from drug channel: are metastases significantly targeted?
    # --> Yes, if mean(FG) significantly above 1.5*mean(BG)
    if dataset == 'F15':
        drugvol = filehandling.readNifti(pathDrug + 'patchvolume_' + str(metastasis['patch_id']) + '.nii')
        otherblobs = dataconversions.filter_dicts(segmentation['metastases'], 'patch_id', metastasis['patch_id'])
        metastasis = p_leo.characterize_drugtargeting(metastasis, drugvol, min_padding=25, thr=1.5, otherblobs=otherblobs)

for m, metastasis in enumerate(segmentation['metastases']):
    print('Loop 2/2: Adding information for metastasis #' + str(metastasis['global_id']))
    # compute distance to nearest neighbor metastasis
    segmentation['metastases'][m]['location']['distNN'] = 99999
    segmentation['metastases'][m]['location']['distNNtargeted'] = 99999
    segmentation['metastases'][m]['location']['distNNuntargeted'] = 99999
    for neighbor in segmentation['metastases']:
        dist = blobanalysis.point_dist(metastasis['location']['center'], neighbor['location']['center'])
        if metastasis['global_id'] != neighbor['global_id']:  # compare IDs by value, not object identity
            segmentation['metastases'][m]['location']['distNN'] = np.min([segmentation['metastases'][m]['location']['distNN'], dist])
            if dataset == 'F15':
                if neighbor['DrugCharacterization']['ttest_result']: